Sep 29 19:08:07 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 19:08:08 crc restorecon[4663]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 19:08:08 crc restorecon[4663]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc 
restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 19:08:08 crc 
restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc 
restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc 
restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 
crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 
19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc 
restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 19:08:08 crc restorecon[4663]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 29 19:08:09 crc kubenswrapper[4779]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.548405    4779 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553643    4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553703    4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553719    4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553730    4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553741    4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553751    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553762    4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553772    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553782    4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553824    4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553839    4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553852    4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553864    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553874    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553883    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553894    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553904    4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553914    4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553924    4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553934    4779 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553972    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553983    4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.553994    4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554004    4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554014    4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554023    4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554033    4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554044    4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554057    4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554070    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554084    4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554099    4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554112    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554124    4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554134    4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554147    4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554159    4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554170    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554181    4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554192    4779 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554201    4779 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554212    4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554223    4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554233    4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554242    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554252    4779 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554262    4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554272    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554282    4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554295    4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554304    4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554314    4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554364    4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554377    4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554388    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554398    4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554409    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554419    4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554432    4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554442    4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554452    4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554463    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554473    4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554486    4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554498    4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554509    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.554519    4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.555526    4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.555553    4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.555568    4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.555582    4779 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556590    4779 flags.go:64] FLAG: --address="0.0.0.0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556621    4779 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556644    4779 flags.go:64] FLAG: --anonymous-auth="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556660    4779 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556675    4779 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556688    4779 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556703    4779 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556717    4779 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556729    4779 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556741    4779 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556753    4779 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556766    4779 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556778    4779 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556790    4779 flags.go:64] FLAG: --cgroup-root=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556801    4779 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556813    4779 flags.go:64] FLAG: --client-ca-file=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556824    4779 flags.go:64] FLAG: --cloud-config=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556836    4779 flags.go:64] FLAG: --cloud-provider=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556847    4779 flags.go:64] FLAG: --cluster-dns="[]"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556861    4779 flags.go:64] FLAG: --cluster-domain=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556872    4779 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556885    4779 flags.go:64] FLAG: --config-dir=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556895    4779 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556908    4779 flags.go:64] FLAG: --container-log-max-files="5"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556924    4779 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556935    4779 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556947    4779 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556961    4779 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556973    4779 flags.go:64] FLAG: --contention-profiling="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556985    4779 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.556996    4779 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557009    4779 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557022    4779 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557037    4779 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557049    4779 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557061    4779 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557072    4779 flags.go:64] FLAG: --enable-load-reader="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557084    4779 flags.go:64] FLAG: --enable-server="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557096    4779 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557112    4779 flags.go:64] FLAG: --event-burst="100"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557124    4779 flags.go:64] FLAG: --event-qps="50"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557135    4779 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557147    4779 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557158    4779 flags.go:64] FLAG: --eviction-hard=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557173    4779 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557185    4779 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557196    4779 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557208    4779 flags.go:64] FLAG: --eviction-soft=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557219    4779 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557231    4779 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557243    4779 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557254    4779 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557265    4779 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557276    4779 flags.go:64] FLAG: --fail-swap-on="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557288    4779 flags.go:64] FLAG: --feature-gates=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557302    4779 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557314    4779 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557367    4779 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557379    4779 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557393    4779 flags.go:64] FLAG: --healthz-port="10248"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557406    4779 flags.go:64] FLAG: --help="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557418    4779 flags.go:64] FLAG: --hostname-override=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557429    4779 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557440    4779 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557452    4779 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557464    4779 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557475    4779 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557486    4779 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557500    4779 flags.go:64] FLAG: --image-service-endpoint=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557511    4779 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557521    4779 flags.go:64] FLAG: --kube-api-burst="100"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557534    4779 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557546    4779 flags.go:64] FLAG: --kube-api-qps="50"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557557    4779 flags.go:64] FLAG: --kube-reserved=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557569    4779 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557580    4779 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557593    4779 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557604    4779 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557615    4779 flags.go:64] FLAG: --lock-file=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557626    4779 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557659    4779 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557672    4779 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557692    4779 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557704    4779 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557754    4779 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557768    4779 flags.go:64] FLAG: --logging-format="text"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557779    4779 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557792    4779 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557804    4779 flags.go:64] FLAG: --manifest-url=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557815    4779 flags.go:64] FLAG: --manifest-url-header=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557831    4779 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557844    4779 flags.go:64] FLAG: --max-open-files="1000000"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557859    4779 flags.go:64] FLAG: --max-pods="110"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557871    4779 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557884    4779 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557895    4779 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557906    4779 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557918    4779 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557930    4779 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557944    4779 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557971    4779 flags.go:64] FLAG: --node-status-max-images="50"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557983    4779 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.557995    4779 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558007    4779 flags.go:64] FLAG: --pod-cidr=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558021    4779 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558037    4779 flags.go:64] FLAG: --pod-manifest-path=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558049    4779 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558060    4779 flags.go:64] FLAG: --pods-per-core="0"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558072    4779 flags.go:64] FLAG: --port="10250"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558083    4779 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558094    4779 flags.go:64] FLAG: --provider-id=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558105    4779 flags.go:64] FLAG: --qos-reserved=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558118    4779 flags.go:64] FLAG: --read-only-port="10255"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558130    4779 flags.go:64] FLAG: --register-node="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558141    4779 flags.go:64] FLAG: --register-schedulable="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558153    4779 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558172    4779 flags.go:64] FLAG: --registry-burst="10"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558185    4779 flags.go:64] FLAG: --registry-qps="5"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558198    4779 flags.go:64] FLAG: --reserved-cpus=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558209    4779 flags.go:64] FLAG: --reserved-memory=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558224    4779 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558235    4779 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558247    4779 flags.go:64] FLAG: --rotate-certificates="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558258    4779 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558270    4779 flags.go:64] FLAG: --runonce="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558282    4779 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558294    4779 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558306    4779 flags.go:64] FLAG: --seccomp-default="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558318    4779 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558371    4779 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558384    4779 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558397    4779 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558409    4779 flags.go:64] FLAG: --storage-driver-password="root"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558421    4779 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558432    4779 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558444    4779 flags.go:64] FLAG: --storage-driver-user="root"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558456    4779 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558468    4779 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558480    4779 flags.go:64] FLAG: --system-cgroups=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558492    4779 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558514    4779 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558526    4779 flags.go:64] FLAG: --tls-cert-file=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558537    4779 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558552    4779 flags.go:64] FLAG: --tls-min-version=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558564    4779 flags.go:64] FLAG: --tls-private-key-file=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558575    4779 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558587    4779 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558598    4779 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558610    4779 flags.go:64] FLAG: --v="2"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558626    4779 flags.go:64] FLAG: --version="false"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558641    4779 flags.go:64] FLAG: --vmodule=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558654    4779 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.558666    4779 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.558948    4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.558995    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559007    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559022    4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559034    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559046    4779 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559060    4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559087    4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559100    4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559111    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559121    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559134    4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559144    4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559155    4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559166    4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559177    4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559189    4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559217    4779 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559229    4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559240    4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559251    4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559261    4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559271    4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559296    4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559306    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559321    4779 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559371    4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559383    4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559393    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559403    4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559413    4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559423    4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559434    4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559444    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559454    4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559472    4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559482    4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559496    4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559510    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559521    4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559532    4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559543    4779 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559555    4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559566    4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559576    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559586    4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559597    4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559609    4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559622    4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559633    4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559643    4779 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559667    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559678    4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559688    4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559698    4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559708    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559718    4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559728    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559738    4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559762    4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559773    4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559783    4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559792    4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559802    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559812    4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559821    4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559831    4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559863    4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559873    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559884    4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.559893    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.559924    4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.572753    4779 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.572799    4779 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572880    4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572889    4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572894    4779 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572898    4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572902    4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572906    4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572909    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572913    4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572917    4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572922    4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572927    4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572932    4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572937    4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572941    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572945    4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572949    4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572954    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572958    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572963    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572968    4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572972    4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572977    4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572981    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572986    4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572990    4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572995    4779 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.572999    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573004    4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573008    4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573012    4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573016    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573021    4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573025    4779 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573030    4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573034    4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573037    4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573042    4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573049    4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573053    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573058    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573062    4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573068    4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573072    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573076    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573079    4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573083    4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573087    4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573090    4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573094    4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573098    4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573101    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573104    4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573108    4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573112    4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573116    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573119    4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573124    4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573128    4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573131    4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573135    4779 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573138    4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573142    4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573146    4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573149    4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573153    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573157    4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573160    4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573163    4779 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573167    4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573171    4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573174    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.573181    4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573292    4779 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573300    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573304    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573309    4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573316    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573320    4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573324    4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573348    4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573356    4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573361    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573365    4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573369    4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573372    4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573378    4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573382    4779 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573388    4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573391    4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573395    4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573399    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573402    4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573407    4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573411    4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573416    4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573421    4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573425    4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573429    4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573433    4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573436    4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573440    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573444    4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573448    4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573452    4779 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573456    4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573459    4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573463    4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573467    4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573471    4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573476    4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573480    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573484    4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573487    4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573491    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573494    4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573498    4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573502    4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573506    4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573510    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573514    4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573517    4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573521    4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573525    4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573528    4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573531    4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573535    4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573538    4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573542    4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573545    4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573549    4779 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573552    4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573556    4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573559    4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573563    4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573566    4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573570    4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573574    4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573577    4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573581    4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573584    4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573587    4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573591    4779 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.573594    4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.573601    4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.574696    4779 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.578629    4779 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.578700    4779 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.580281    4779 server.go:997] "Starting client certificate rotation"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.580305    4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.580692    4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-20 12:07:44.011516173 +0000 UTC
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.580834    4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1960h59m34.430689665s for next certificate rotation
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.606165    4779 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.609062    4779 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.625074    4779 log.go:25] "Validated CRI v1 runtime API"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.660801    4779 log.go:25] "Validated CRI v1 image API"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.663529    4779 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.670899    4779 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-19-03-38-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.670952    4779 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.691713    4779 manager.go:217] Machine: {Timestamp:2025-09-29 19:08:09.688096515 +0000 UTC m=+0.572521655 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d61591a8-214b-4be1-8c58-e9ade5216b62 BootID:c0f08dfd-4d0c-4b55-a30c-6725bfe13689 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:76:41:00 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:76:41:00 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:d8:ab:9a Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:af:b3:6e Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:3b:7f:74 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:4c:5c:70 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:aa:6b:d2:ac:79:03 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:92:1d:7e:d7:78:fe Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288
Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.692007 4779 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.692184 4779 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.692711 4779 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.692983 4779 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.693027 4779 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.693274 4779 
topology_manager.go:138] "Creating topology manager with none policy" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.693286 4779 container_manager_linux.go:303] "Creating device plugin manager" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.693866 4779 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.693909 4779 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.694619 4779 state_mem.go:36] "Initialized new in-memory state store" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.694737 4779 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.699212 4779 kubelet.go:418] "Attempting to sync node with API server" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.699246 4779 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.699304 4779 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.699325 4779 kubelet.go:324] "Adding apiserver pod source" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.699370 4779 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.703641 4779 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.705003 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.705080 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.705260 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.705450 4779 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.705450 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.707920 4779 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709394 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709418 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709426 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709433 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709444 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709451 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709459 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709470 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709479 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709487 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709524 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.709531 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.710561 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.710956 4779 server.go:1280] "Started kubelet"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.711938 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.712366 4779 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.712370 4779 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 29 19:08:09 crc systemd[1]: Started Kubernetes Kubelet.
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.712988 4779 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.713928 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.713963 4779 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.713990 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 20:32:44.412735059 +0000 UTC
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.714024 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1849h24m34.698713073s for next certificate rotation
Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.714102 4779 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.714191 4779 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.714200 4779 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.714230 4779 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.717203 4779 server.go:460] "Adding debug handlers to kubelet server"
Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.717555 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="200ms"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.717897 4779 factory.go:55] Registering systemd factory
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.717914 4779 factory.go:221] Registration of the systemd container factory successfully
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.718550 4779 factory.go:153] Registering CRI-O factory
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.718598 4779 factory.go:221] Registration of the crio container factory successfully
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.718866 4779 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.718891 4779 factory.go:103] Registering Raw factory
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.718907 4779 manager.go:1196] Started watching for new ooms in manager
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.720536 4779 manager.go:319] Starting recovery of all containers
Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.722534 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.722742 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.725424 4779 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869d66c35b7a755 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 19:08:09.710929749 +0000 UTC m=+0.595354859,LastTimestamp:2025-09-29 19:08:09.710929749 +0000 UTC m=+0.595354859,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732880 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732925 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732945 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732955 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732965 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732976 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732987 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.732997 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733015 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733026 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733035 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733046 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733057 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733073 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733085 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733098 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733109 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733121 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733132 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733143 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733158 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733168 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733179 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733216 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733227 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733237 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733250 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733261 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733272 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733281 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733291 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733302 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733316 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733361 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733373 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733382 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733392 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733400 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733409 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733418 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733428 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733438 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733449 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733462 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733472 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733481 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733491 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733504 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733513 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733522 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733532 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733541 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733556 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733566 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733597 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733608 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733618 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733627 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733636 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733644 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733652 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733661 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733673 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733684 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733694 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733704 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733715 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733726 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733737 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733746 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733755 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733764 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733774 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733783 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733792 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733802 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733812 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733821 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733837 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733847 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733858 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733870 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733880 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733891 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733903 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733915 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733925 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733935 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733944 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733954 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733965 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733974 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733983 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.733991 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734001 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734011 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734021 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734030 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734040 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734050 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734060 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734068 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734077 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734086 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734100 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734109 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734119 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734129 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734138 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734148 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734161 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734172 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734182 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734192 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734201 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734210 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734219 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734229 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734237 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734245 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734254 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734265 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734273 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734282 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734292 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734301 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734312 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734347 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734356 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734366 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734374 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734383 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734392 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734400 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734408 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734417 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734426 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734434 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734443 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734452 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"
volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734462 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734471 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734480 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734490 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734500 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734509 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734518 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734528 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734537 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734547 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734557 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734566 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734576 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734586 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734596 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734604 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734613 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734622 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734632 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734642 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734651 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734660 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734669 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734678 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734687 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734696 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734705 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734715 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734724 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734733 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734742 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734750 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734760 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734769 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734780 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734789 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734799 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734807 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.734817 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.736912 4779 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.736958 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.736972 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.736983 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 
19:08:09.736994 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737005 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737013 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737023 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737032 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737042 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737051 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737061 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737075 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737085 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737096 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737106 4779 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737115 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737125 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737136 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737146 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737156 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737166 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737176 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737187 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737197 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737206 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737215 4779 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737225 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737236 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737246 4779 reconstruct.go:97] "Volume reconstruction finished" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.737254 4779 reconciler.go:26] "Reconciler: start to sync state" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.747234 4779 manager.go:324] Recovery completed Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.760858 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.761670 4779 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.762284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.762340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.762361 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.764865 4779 kubelet_network_linux.go:50] "Initialized iptables rules." 
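
The reconstruct.go entries above show the kubelet, after a restart, rebuilding its "actual state of world" from the mounts it finds under /var/lib/kubelet: each recovered volume is first recorded as "uncertain" and only later confirmed, which is why every entry ends with "Volume reconstruction finished" before the reconciler starts. A minimal sketch of that bookkeeping follows; the types and method names are illustrative assumptions, not the kubelet's actual API.

package main

import "fmt"

// mountState models how a reconstructed volume is tracked before and
// after verification. Names here are hypothetical, for illustration.
type mountState int

const (
	stateUncertain mountState = iota // found on disk, not yet verified
	stateMounted                     // confirmed by a later mount/verify
)

type actualState struct {
	volumes map[string]mountState // volumeName -> state
}

// markUncertain records a volume discovered during reconstruction,
// mirroring the "marked as uncertain" log lines above.
func (a *actualState) markUncertain(volumeName string) {
	a.volumes[volumeName] = stateUncertain
}

// markMounted upgrades a volume once a mount (or re-verification) succeeds.
func (a *actualState) markMounted(volumeName string) {
	a.volumes[volumeName] = stateMounted
}

func main() {
	a := &actualState{volumes: map[string]mountState{}}
	// Reconstruction adds the volume as uncertain first, as in the log.
	a.markUncertain("kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config")
	a.markMounted("kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config")
	fmt.Println(a.volumes)
}
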
protocol="IPv6" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.764902 4779 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.764927 4779 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.764992 4779 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.765020 4779 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.765047 4779 state_mem.go:36] "Initialized new in-memory state store" Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.765067 4779 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 19:08:09 crc kubenswrapper[4779]: W0929 19:08:09.766778 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.766854 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.786792 4779 policy_none.go:49] "None policy: Start" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.787824 4779 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.787869 4779 state_mem.go:35] "Initializing new in-memory state store" Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.814817 4779 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.832673 4779 manager.go:334] "Starting Device Plugin manager" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.832833 4779 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.832854 4779 server.go:79] "Starting device plugin registration server" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.833268 4779 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.833295 4779 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.833496 4779 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.833675 4779 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.833717 4779 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.839512 4779 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 19:08:09 crc kubenswrapper[4779]: 
I0929 19:08:09.865194 4779 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.865360 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.866935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.866978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.866986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.867117 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.867344 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.867390 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868249 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868278 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868386 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868404 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868540 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868697 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.868735 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869353 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869380 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869488 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869662 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.869691 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.870922 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.870966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.870986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.870996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.871019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.871030 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.871133 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.871226 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.871268 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872037 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872047 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872171 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872194 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872512 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.872520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.873077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.873113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.873126 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.918606 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="400ms" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.933870 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.935194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.935224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.935233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.935252 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 19:08:09 crc kubenswrapper[4779]: E0929 19:08:09.935660 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: 
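
The lease controller above fails to ensure its lease and schedules a retry at interval="400ms", and the node registration POST to /api/v1/nodes is refused the same way; both simply retry on a timer until the apiserver comes up (a second attempt appears further down). A minimal sketch of that fixed-interval retry, with a stand-in registration function rather than a real API call:

package main

import (
	"errors"
	"fmt"
	"time"
)

// tryRegister stands in for the POST to /api/v1/nodes seen in the log;
// here it fails a few times purely to exercise the retry loop.
func tryRegister(attempt int) error {
	if attempt < 3 {
		return errors.New("connect: connection refused")
	}
	return nil
}

func main() {
	const interval = 400 * time.Millisecond // matches the retry interval logged above
	for attempt := 1; ; attempt++ {
		if err := tryRegister(attempt); err != nil {
			fmt.Printf("unable to register node, retrying: %v\n", err)
			time.Sleep(interval)
			continue
		}
		fmt.Println("node registered")
		return
	}
}
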
connection refused" node="crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.939991 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940016 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940054 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940083 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940096 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940123 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
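
The reconciler_common.go entries here and below show the two-phase volume path for the static pods' host-path volumes: VerifyControllerAttachedVolume first (trivial for host paths, which need no controller attach), then MountVolume, ending in "MountVolume.SetUp succeeded". A compact sketch of that desired-state walk, with hypothetical types standing in for the kubelet's operation executor:

package main

import "fmt"

// volume models the two reconciler steps visible in the log. Host-path
// volumes need no controller attach, so verification succeeds trivially.
type volume struct {
	name     string
	attached bool
	mounted  bool
}

func reconcile(desired []*volume) {
	for _, v := range desired {
		if !v.attached {
			fmt.Printf("VerifyControllerAttachedVolume started for %q\n", v.name)
			v.attached = true // host-path: nothing to attach, mark verified
		}
		if !v.mounted {
			fmt.Printf("MountVolume started for %q\n", v.name)
			v.mounted = true // a bind mount in the real kubelet
			fmt.Printf("MountVolume.SetUp succeeded for %q\n", v.name)
		}
	}
}

func main() {
	reconcile([]*volume{{name: "cert-dir"}, {name: "resource-dir"}})
}
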
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940149 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940163 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940252 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940336 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:09 crc kubenswrapper[4779]: I0929 19:08:09.940401 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.041989 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042067 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042102 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042130 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042161 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042189 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042219 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042241 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042251 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042270 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042383 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042412 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 
19:08:10.042422 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042245 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042429 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042491 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042554 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042575 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042626 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042647 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042702 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042716 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042748 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042762 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042821 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042874 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.042928 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.043036 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.135788 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.143891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.143968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.143994 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.144044 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.144913 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial 
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.186693 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.194025 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.214976 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.222403 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.225656 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.237892 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b03807e2e850151e729a3e9506ce28136071347d8be5e8ec9fd8098e6bc07aad WatchSource:0}: Error finding container b03807e2e850151e729a3e9506ce28136071347d8be5e8ec9fd8098e6bc07aad: Status 404 returned error can't find the container with id b03807e2e850151e729a3e9506ce28136071347d8be5e8ec9fd8098e6bc07aad
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.238866 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-6e8f8984ac49515e2fa866a37b2be341ecf9895fbe787f29b02b4552a4ca2919 WatchSource:0}: Error finding container 6e8f8984ac49515e2fa866a37b2be341ecf9895fbe787f29b02b4552a4ca2919: Status 404 returned error can't find the container with id 6e8f8984ac49515e2fa866a37b2be341ecf9895fbe787f29b02b4552a4ca2919
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.248623 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-60e2c009796d64b4f18f6366b54477f34f1bf0062f352de916bb296fbf5aae13 WatchSource:0}: Error finding container 60e2c009796d64b4f18f6366b54477f34f1bf0062f352de916bb296fbf5aae13: Status 404 returned error can't find the container with id 60e2c009796d64b4f18f6366b54477f34f1bf0062f352de916bb296fbf5aae13
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.252616 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c4ff05d321d15fd4829e2825ee28145c0cc91d665642315dda8a4e4bcc31747a WatchSource:0}: Error finding container c4ff05d321d15fd4829e2825ee28145c0cc91d665642315dda8a4e4bcc31747a: Status 404 returned error can't find the container with id c4ff05d321d15fd4829e2825ee28145c0cc91d665642315dda8a4e4bcc31747a
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.254376 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-57d8700fc77bc1971a74029db69a65c49a2d54b99009ba56d7e2adfc75ac76a0 WatchSource:0}: Error finding container 57d8700fc77bc1971a74029db69a65c49a2d54b99009ba56d7e2adfc75ac76a0: Status 404 returned error can't find the container with id 57d8700fc77bc1971a74029db69a65c49a2d54b99009ba56d7e2adfc75ac76a0
Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.320154 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="800ms"
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.537820 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.537924 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.545270 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.546867 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.546916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.546934 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.546965 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.547562 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc"
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.712877 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.771842 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"57d8700fc77bc1971a74029db69a65c49a2d54b99009ba56d7e2adfc75ac76a0"}
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.772985 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c4ff05d321d15fd4829e2825ee28145c0cc91d665642315dda8a4e4bcc31747a"}
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.774867 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"60e2c009796d64b4f18f6366b54477f34f1bf0062f352de916bb296fbf5aae13"}
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.776252 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b03807e2e850151e729a3e9506ce28136071347d8be5e8ec9fd8098e6bc07aad"}
Sep 29 19:08:10 crc kubenswrapper[4779]: I0929 19:08:10.777285 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6e8f8984ac49515e2fa866a37b2be341ecf9895fbe787f29b02b4552a4ca2919"}
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.884909 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.885316 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:10 crc kubenswrapper[4779]: W0929 19:08:10.938912 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:10 crc kubenswrapper[4779]: E0929 19:08:10.939003 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:11 crc kubenswrapper[4779]: W0929 19:08:11.057226 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:11 crc kubenswrapper[4779]: E0929 19:08:11.057377 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Sep 29 19:08:11 crc kubenswrapper[4779]: E0929 19:08:11.120697 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="1.6s"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.347892 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.351914 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.351948 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.351958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.351977 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 19:08:11 crc kubenswrapper[4779]: E0929 19:08:11.352516 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.713357 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.781595 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204" exitCode=0
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.781906 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.782341 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.783368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.783402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.783414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.785379 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.785376 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.785451 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.785473 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.785492 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.786184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.786215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.786224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.786943 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f5f839723495903958ba3bf2287ab952d82f2761280d67615bd5e6ff5da0fe2f" exitCode=0
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.787002 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.787013 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f5f839723495903958ba3bf2287ab952d82f2761280d67615bd5e6ff5da0fe2f"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.787734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.787789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.787807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.788675 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf" exitCode=0
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.788783 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.789150 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.789497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.789524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.789537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.792071 4779 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6" exitCode=0
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.792103 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6"}
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.792215 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.793004 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.793025 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.793036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.793031 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.794489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.794537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:11 crc kubenswrapper[4779]: I0929 19:08:11.794557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.713162 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Sep 29 19:08:12 crc kubenswrapper[4779]: E0929 19:08:12.721688 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="3.2s"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.796837 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.796887 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.796892 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.796901 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.797024 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.797050 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.797582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.797638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.797648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.798665 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.798693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.798705 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.798711 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.799492 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.799522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.799532 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.799958 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b" exitCode=0
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.800018 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.800021 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.800601 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.800628 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.800640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.801343 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.801438 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.801643 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"86f641e693891ab12f1181bd6f5b0f4ed94773aadcb61d65ee576eeb6493108b"}
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802213 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802236 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802245 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.802336 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.952868 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.954164 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.954243 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.954264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:12 crc kubenswrapper[4779]: I0929 19:08:12.954302 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 19:08:12 crc kubenswrapper[4779]: E0929 19:08:12.954902 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.356057 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.805195 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302" exitCode=0
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.805342 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.805376 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806300 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806351 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806503 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302"}
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806779 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806932 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.806988 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.809663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.811357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.811426 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.811444 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:13 crc kubenswrapper[4779]: I0929 19:08:13.989588 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.299383 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.299657 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.301280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.301379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.301408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.812934 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b6423a6fa1e69da68b47af0b59980013d6bcc23b51d0b25bb0085fab03dc3e36"}
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.812998 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"968e5da40da49dffb34c44e42d5a955975ab0fca1d9987312ee20b6c2ab42a52"}
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.813018 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.813022 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4598380e2d0b9f8258cb148abf8cfaa7a880d8aadc09c0d3416ab9ea976db3d"}
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.813044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"92369b55bd00c0b1116fcb69584f4ba6d73f00511343902f243682db2cc6ff8b"}
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.814191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.814256 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:14 crc kubenswrapper[4779]: I0929 19:08:14.814274 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.822020 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f5ea8a87c2318ebb245b796b274bda0d959d4666414d5b1bd6b03ffc309e141"}
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.822102 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.822122 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823686 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:15 crc kubenswrapper[4779]: I0929 19:08:15.823822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.155571 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.156896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.156953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.156971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.157003 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.536941 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.537174 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.538694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.538768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.538794 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.716301 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.826542 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.828261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.828368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:16 crc kubenswrapper[4779]: I0929 19:08:16.828389 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.300226 4779 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.300401 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.827375 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.828759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.828789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:17 crc kubenswrapper[4779]: I0929 19:08:17.828800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:19 crc kubenswrapper[4779]: E0929 19:08:19.839629 4779 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.631989 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.632276 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.633840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.633906 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.633926 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.638458 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.835094 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.835331 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.835941 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.835976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.836012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:20 crc kubenswrapper[4779]: I0929 19:08:20.838990 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:21 crc kubenswrapper[4779]: I0929 19:08:21.247473 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 19:08:21 crc kubenswrapper[4779]: I0929 19:08:21.836676 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:21 crc kubenswrapper[4779]: I0929 19:08:21.837764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:21 crc kubenswrapper[4779]: I0929 19:08:21.837835 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:21 crc kubenswrapper[4779]: I0929 19:08:21.837860 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:22 crc kubenswrapper[4779]: I0929 19:08:22.839153 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:22 crc kubenswrapper[4779]: I0929 19:08:22.840308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:22 crc kubenswrapper[4779]: I0929 19:08:22.840391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:22 crc kubenswrapper[4779]: I0929 19:08:22.840411 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.356138 4779 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.356240 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:08:23 crc kubenswrapper[4779]: W0929 19:08:23.412883 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.412975 4779 trace.go:236] Trace[1954961061]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 19:08:13.411) (total time: 10001ms):
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[1954961061]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:08:23.412)
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[1954961061]: [10.001252855s] [10.001252855s] END
Sep 29 19:08:23 crc kubenswrapper[4779]: E0929 19:08:23.412998 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 29 19:08:23 crc kubenswrapper[4779]: W0929 19:08:23.548660 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.548798 4779 trace.go:236] Trace[307217039]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 19:08:13.546) (total time: 10001ms):
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[307217039]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:08:23.548)
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[307217039]: [10.001770611s] [10.001770611s] END
Sep 29 19:08:23 crc kubenswrapper[4779]: E0929 19:08:23.548833 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.566754 4779 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:50072->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.566824 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:50072->192.168.126.11:17697: read: connection reset by peer"
Sep 29 19:08:23 crc kubenswrapper[4779]: W0929 19:08:23.572371 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.572497 4779 trace.go:236] Trace[1001342113]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 19:08:13.570) (total time: 10001ms):
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[1001342113]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:08:23.572)
Sep 29 19:08:23 crc kubenswrapper[4779]: Trace[1001342113]: [10.001824406s] [10.001824406s] END
Sep 29 19:08:23 crc kubenswrapper[4779]: E0929 19:08:23.572530 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.713053 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.844828 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.846646 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d" exitCode=255
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.846696 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d"}
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.846872 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.847936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.847981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.847993 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:23 crc kubenswrapper[4779]: I0929 19:08:23.848620 4779 scope.go:117] "RemoveContainer" containerID="8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d"
Sep 29 19:08:24 crc kubenswrapper[4779]: W0929 19:08:24.084310 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.084475 4779 trace.go:236] Trace[1551564807]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 19:08:14.082) (total time: 10001ms):
Sep 29 19:08:24 crc kubenswrapper[4779]: Trace[1551564807]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:08:24.084)
Sep 29 19:08:24 crc kubenswrapper[4779]: Trace[1551564807]: [10.001712053s] [10.001712053s] END
Sep 29 19:08:24 crc kubenswrapper[4779]: E0929 19:08:24.084511 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.330539 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.330730 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.331816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.331852 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.331862 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.349513 4779 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.349585 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.401407 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.852227 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.854695 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a"}
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.854795 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.855761 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.855954 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.856001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.856017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.856949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.856972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.856984 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:24 crc kubenswrapper[4779]: I0929 19:08:24.878681 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Sep 29 19:08:25 crc kubenswrapper[4779]: I0929 19:08:25.857597 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:25 crc kubenswrapper[4779]: I0929 19:08:25.859164 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:25 crc kubenswrapper[4779]: I0929 19:08:25.859220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:25 crc kubenswrapper[4779]: I0929 19:08:25.859228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:27 crc kubenswrapper[4779]: I0929 19:08:27.249333 4779 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 29 19:08:27 crc kubenswrapper[4779]: I0929 19:08:27.300486 4779 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 19:08:27 crc kubenswrapper[4779]: I0929 19:08:27.300807 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.361338 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.361991 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.362089 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.363231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.363267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.363280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.368462 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.865385 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.866781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.866837 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:28 crc kubenswrapper[4779]: I0929 19:08:28.866850 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:29 crc kubenswrapper[4779]: E0929 19:08:29.340736 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 29 19:08:29 crc kubenswrapper[4779]: E0929 19:08:29.349285 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.356372 4779 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.768446 4779 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 29 19:08:29 crc kubenswrapper[4779]: E0929 19:08:29.839779 4779 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.867570 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.868412 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.868439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.868448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.927414 4779 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 29 19:08:29 crc kubenswrapper[4779]: I0929 19:08:29.990049 4779 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.711105 4779 apiserver.go:52] "Watching apiserver"
Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.713420 4779 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.716223 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-7hb2m","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.717083 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.717168 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.717589 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.718074 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.719010 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.720085 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.720123 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.720660 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.720138 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.720789 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.721001 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.721050 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.722062 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.722241 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.722255 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.725750 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.725968 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.726346 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.726614 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.726713 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.726738 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.726829 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.738219 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.750422 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.760963 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.769729 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.778702 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.797211 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.813021 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.815884 4779 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.828501 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866009 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866059 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866078 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866097 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866115 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866132 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866150 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866167 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866182 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866197 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866246 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866263 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866558 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866724 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866860 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867029 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867138 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867204 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.866278 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867260 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867276 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867298 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867348 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867482 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867502 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867291 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867565 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867580 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867595 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867609 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867623 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 
19:08:30.867637 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867670 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867864 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867941 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867949 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.867984 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868076 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868533 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868586 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868613 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868721 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868739 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868757 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.868812 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869060 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869106 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869126 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869334 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869718 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869887 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869912 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869927 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.869950 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870014 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870244 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: 
"49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870337 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870341 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870630 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870649 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870667 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870722 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870738 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870759 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870777 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870773 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870805 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870820 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870834 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870853 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870890 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.870978 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871148 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871245 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871280 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871401 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871412 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871428 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871444 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871742 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871867 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871959 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872187 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.871970 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872353 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872377 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872396 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872411 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872428 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872444 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872460 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872476 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872493 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872509 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872524 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872539 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872553 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872830 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.872875 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873150 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873249 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873278 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873296 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873311 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873339 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873378 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873393 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873408 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873477 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873494 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873509 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873541 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873557 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873575 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873591 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873607 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873612 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873624 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873642 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873658 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873687 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873705 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873720 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873735 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873750 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873766 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873783 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873798 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873800 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873813 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873821 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873872 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873906 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873931 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.873958 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874036 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874063 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874075 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874087 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874090 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874113 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874141 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874166 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874190 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874213 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874235 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874283 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874309 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874356 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874365 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874381 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874375 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874411 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874538 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874537 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874591 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874669 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a" exitCode=255 Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874712 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a"} Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874750 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874791 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874808 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874912 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 19:08:30 crc 
kubenswrapper[4779]: I0929 19:08:30.874930 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874948 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874964 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874980 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875036 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875075 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875091 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875110 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875160 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875179 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875208 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875226 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875277 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875292 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875356 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875372 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875409 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875455 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875472 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875487 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875593 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875623 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875663 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875679 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875693 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875721 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875741 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875764 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875784 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875861 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875891 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875911 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875968 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875984 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876000 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876026 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876147 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876162 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod 
\"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876177 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876214 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876229 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876256 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876292 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876307 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876359 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876438 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876461 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876476 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876492 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876518 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876550 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876587 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876709 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876737 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876752 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876767 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876793 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876831 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876845 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876861 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876894 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876917 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876932 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876979 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.876996 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877012 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877027 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877043 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877226 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877243 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877274 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877336 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877353 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877443 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877461 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877477 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877506 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877670 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877696 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877711 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877731 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877747 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877762 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877790 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877942 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/45b89d12-bbd2-4b47-815d-a7421cc1aa00-hosts-file\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877974 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878017 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878034 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878050 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878081 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878118 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvn4g\" (UniqueName: \"kubernetes.io/projected/45b89d12-bbd2-4b47-815d-a7421cc1aa00-kube-api-access-bvn4g\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878144 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878161 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878193 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" 
(UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878267 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878283 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878347 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878367 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878529 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878541 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878551 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878560 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878570 4779 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878580 4779 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878604 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878615 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878624 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878634 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878643 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878652 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878674 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878684 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878694 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878704 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878714 4779 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878723 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878733 4779 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878742 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878751 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878772 4779 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878791 4779 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878800 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878809 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878818 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878827 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878836 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878845 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878854 4779 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878863 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878872 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878882 4779 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878907 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878917 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878927 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878936 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878945 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878955 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878964 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878987 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878996 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879005 4779 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879014 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879035 4779 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879044 4779 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879063 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879073 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879082 4779 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879091 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879102 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879112 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.874770 4779 scope.go:117] "RemoveContainer" containerID="8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889060 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889629 4779 scope.go:117] "RemoveContainer" containerID="7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.889894 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.875759 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877411 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.877807 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878093 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878227 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878590 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.878746 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879199 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879272 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879682 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879746 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879944 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.879941 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.880728 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.880728 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.881056 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.883811 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884050 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884054 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884445 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884709 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884760 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.884950 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.885059 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.885291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.885730 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.885964 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.885938 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.886465 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.886762 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.887369 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.887425 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.887691 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.887968 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.888304 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.888787 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.888874 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889091 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889124 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889165 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889485 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.889883 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890019 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890167 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890061 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.894047 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890368 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890682 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890720 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.890976 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.894079 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891181 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891219 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891548 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891554 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891946 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.892695 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.893220 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.893462 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.891456 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.894369 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.894613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.894772 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895064 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895177 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895341 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895723 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895723 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.895894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896113 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896558 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896666 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896990 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.896991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.897223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.897300 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.897382 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.897710 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.897973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.898082 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.898702 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.899143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.900018 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.900763 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.900785 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.900874 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.900896 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.901172 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.901215 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.902135 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.902391 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.902710 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.902765 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.903039 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.903072 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.903307 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.903554 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.903942 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.904511 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.904860 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906013 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906271 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906555 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906606 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906719 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.906679 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907086 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907182 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907225 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907705 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907731 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.907853 4779 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.907908 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:08:31.407883309 +0000 UTC m=+22.292308399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908287 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908566 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908668 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908723 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908786 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908895 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.909305 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.909367 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-d5cnr"] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.909669 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.910027 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908960 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.908043 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.911188 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.912950 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-cvx8m"] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.913051 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.911900 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.912860 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923644 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:31.423624968 +0000 UTC m=+22.308050058 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.913574 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.922270 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923705 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:31.423684349 +0000 UTC m=+22.308109449 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.922247 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.913160 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.913552 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923119 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.922346 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923810 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923832 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.923912 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:31.423891945 +0000 UTC m=+22.308317055 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.922179 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.926103 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.927034 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.927344 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.928010 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-42vjg"] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.928388 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.928743 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jfbb6"] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.928948 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jfbb6" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.929065 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.929265 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.929438 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.929654 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.929866 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.930737 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.930839 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932130 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932255 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932857 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932301 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932357 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932403 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.933253 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932442 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932546 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932649 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.932762 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.936726 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.937190 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.939629 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.939854 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939956 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939973 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939986 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939643 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939715 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.940160 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939818 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939874 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939919 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.940313 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939945 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.940393 4779 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.939876 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.940509 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.943401 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.943572 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: E0929 19:08:30.943780 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:31.443620488 +0000 UTC m=+22.328045588 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.951963 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.954069 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.961415 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.966137 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.975896 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979793 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/476bc421-1113-455e-bcc8-e207e47dad19-rootfs\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979833 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/476bc421-1113-455e-bcc8-e207e47dad19-proxy-tls\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979853 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979879 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/476bc421-1113-455e-bcc8-e207e47dad19-mcd-auth-proxy-config\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979897 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/45b89d12-bbd2-4b47-815d-a7421cc1aa00-hosts-file\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979921 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979937 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-bvn4g\" (UniqueName: \"kubernetes.io/projected/45b89d12-bbd2-4b47-815d-a7421cc1aa00-kube-api-access-bvn4g\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.979955 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb7zk\" (UniqueName: \"kubernetes.io/projected/476bc421-1113-455e-bcc8-e207e47dad19-kube-api-access-tb7zk\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980078 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980089 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980098 4779 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980107 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980117 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980125 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980136 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980145 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980154 4779 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980163 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980172 4779 reconciler_common.go:293] 
"Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980182 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980190 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980199 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980208 4779 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980217 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980226 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980234 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980243 4779 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980251 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980260 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980269 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980396 4779 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc 
kubenswrapper[4779]: I0929 19:08:30.980421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/45b89d12-bbd2-4b47-815d-a7421cc1aa00-hosts-file\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980516 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980530 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980540 4779 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980550 4779 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980559 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980568 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980577 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980585 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980593 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980601 4779 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath 
\"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980610 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980618 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980630 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980696 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980708 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980717 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980726 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980747 4779 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980757 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980766 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980774 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980782 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980791 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980799 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980877 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980895 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980910 4779 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980921 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980932 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980943 4779 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980953 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980963 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980974 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980984 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.980996 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981008 4779 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981019 4779 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981029 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981041 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981052 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981061 4779 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981071 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981081 4779 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981093 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981105 4779 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981119 4779 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981131 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981140 4779 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981151 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981161 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981171 4779 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981181 4779 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981191 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981201 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981226 4779 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981236 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981248 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981260 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981270 4779 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981279 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981288 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981297 4779 reconciler_common.go:293] "Volume 
detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981306 4779 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981330 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981340 4779 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981349 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981360 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981369 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981379 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981388 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981399 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981409 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981418 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981428 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981438 4779 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981447 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981456 4779 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981468 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981478 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981488 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981500 4779 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981518 4779 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981528 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981538 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981547 4779 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981556 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981566 4779 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981575 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node 
\"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981584 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981594 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981604 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981613 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981623 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981633 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981643 4779 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981654 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981664 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981673 4779 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981683 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981700 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981709 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on 
node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981722 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981732 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981743 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981753 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981764 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981775 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981785 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981796 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981806 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981815 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981825 4779 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981834 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981843 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc 
kubenswrapper[4779]: I0929 19:08:30.981852 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981862 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981871 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981881 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981891 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.981900 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:08:30 crc kubenswrapper[4779]: I0929 19:08:30.986076 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:30.999975 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvn4g\" (UniqueName: \"kubernetes.io/projected/45b89d12-bbd2-4b47-815d-a7421cc1aa00-kube-api-access-bvn4g\") pod \"node-resolver-7hb2m\" (UID: \"45b89d12-bbd2-4b47-815d-a7421cc1aa00\") " pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.002782 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.013604 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.022739 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.035927 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.036050 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.044152 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-7hb2m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.049717 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.050044 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.058226 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 19:08:31 crc kubenswrapper[4779]: W0929 19:08:31.060872 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45b89d12_bbd2_4b47_815d_a7421cc1aa00.slice/crio-f46ad163a7a29ec535040914e1e1a527a870add565561f889a5c9ce694efde41 WatchSource:0}: Error finding container f46ad163a7a29ec535040914e1e1a527a870add565561f889a5c9ce694efde41: Status 404 returned error can't find the container with id f46ad163a7a29ec535040914e1e1a527a870add565561f889a5c9ce694efde41 Sep 29 19:08:31 crc kubenswrapper[4779]: W0929 19:08:31.063242 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-d997424539c521cfc8d60f6aab7a98efcb7073d8a70b0eba1e5340d84c915aa8 WatchSource:0}: Error finding container d997424539c521cfc8d60f6aab7a98efcb7073d8a70b0eba1e5340d84c915aa8: Status 404 returned error can't find the container with id d997424539c521cfc8d60f6aab7a98efcb7073d8a70b0eba1e5340d84c915aa8 Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.065711 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.075938 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082570 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-os-release\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082617 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ckjq\" (UniqueName: \"kubernetes.io/projected/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-kube-api-access-8ckjq\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082640 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082662 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082698 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082751 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082891 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-k8s-cni-cncf-io\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082949 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.082981 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083021 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/476bc421-1113-455e-bcc8-e207e47dad19-rootfs\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083054 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/476bc421-1113-455e-bcc8-e207e47dad19-proxy-tls\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083085 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-multus\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083117 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cnibin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083123 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/476bc421-1113-455e-bcc8-e207e47dad19-rootfs\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.083150 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.085370 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-etc-kubernetes\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.085670 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-system-cni-dir\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.085995 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086040 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-kubelet\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086114 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-conf-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086166 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cnibin\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086210 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086418 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cni-binary-copy\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " 
pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-hostroot\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-os-release\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086631 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086657 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086709 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb7zk\" (UniqueName: \"kubernetes.io/projected/476bc421-1113-455e-bcc8-e207e47dad19-kube-api-access-tb7zk\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086736 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-socket-dir-parent\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086757 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-bin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086833 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-tuning-conf-dir\") pod 
\"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086871 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2kg5\" (UniqueName: \"kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.086950 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087064 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-multus-certs\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087123 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087221 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-system-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087604 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087626 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087654 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/476bc421-1113-455e-bcc8-e207e47dad19-mcd-auth-proxy-config\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087681 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087705 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-netns\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087725 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-daemon-config\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087774 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087842 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.087900 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.088031 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c77zh\" (UniqueName: \"kubernetes.io/projected/54a33b8e-b623-4f91-be1d-a38dfcef17d7-kube-api-access-c77zh\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.088090 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.088569 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/476bc421-1113-455e-bcc8-e207e47dad19-mcd-auth-proxy-config\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc 
kubenswrapper[4779]: I0929 19:08:31.089294 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:23Z\\\",\\\"message\\\":\\\"W0929 19:08:12.809143 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 19:08:12.811203 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759172892 cert, and key in /tmp/serving-cert-1445773094/serving-signer.crt, /tmp/serving-cert-1445773094/serving-signer.key\\\\nI0929 19:08:13.132775 1 observer_polling.go:159] Starting file observer\\\\nW0929 19:08:13.135435 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 19:08:13.135596 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:13.138018 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1445773094/tls.crt::/tmp/serving-cert-1445773094/tls.key\\\\\\\"\\\\nF0929 19:08:23.559817 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 
19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.099107 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/476bc421-1113-455e-bcc8-e207e47dad19-proxy-tls\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.102791 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.114808 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb7zk\" (UniqueName: \"kubernetes.io/projected/476bc421-1113-455e-bcc8-e207e47dad19-kube-api-access-tb7zk\") pod \"machine-config-daemon-d5cnr\" (UID: \"476bc421-1113-455e-bcc8-e207e47dad19\") " pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.128635 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.139034 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.152824 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.188664 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cnibin\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.188815 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189656 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189676 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-kubelet\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189756 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-conf-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189773 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cni-binary-copy\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189787 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-hostroot\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189804 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189820 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-os-release\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189836 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189853 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-socket-dir-parent\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189888 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-bin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189903 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189937 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket\") pod 
\"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189954 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2kg5\" (UniqueName: \"kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189972 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-multus-certs\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189988 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190014 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-system-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190030 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190048 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190064 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-netns\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-daemon-config\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190133 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190150 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190165 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c77zh\" (UniqueName: \"kubernetes.io/projected/54a33b8e-b623-4f91-be1d-a38dfcef17d7-kube-api-access-c77zh\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190181 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190197 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190213 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190229 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-os-release\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190244 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ckjq\" (UniqueName: \"kubernetes.io/projected/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-kube-api-access-8ckjq\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190259 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190276 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190297 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-k8s-cni-cncf-io\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190312 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190356 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190374 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-multus\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190390 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190405 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cnibin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190420 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-etc-kubernetes\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190435 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-system-cni-dir\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190496 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189716 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.190540 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-conf-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.188760 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cnibin\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191022 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cni-binary-copy\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189602 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191063 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-hostroot\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.189737 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-kubelet\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " 
pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191092 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191264 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-os-release\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191286 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191304 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191359 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-socket-dir-parent\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.191378 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-bin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192002 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192076 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192356 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-netns\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192393 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" 
(UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-multus-certs\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-run-k8s-cni-cncf-io\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192874 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-daemon-config\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192894 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192919 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192942 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192948 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.192960 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193066 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193139 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-os-release\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " 
pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193148 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193285 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-multus-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193602 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193655 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-system-cni-dir\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193680 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193700 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-etc-kubernetes\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193752 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193766 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-cnibin\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193791 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193798 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/54a33b8e-b623-4f91-be1d-a38dfcef17d7-system-cni-dir\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193814 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-host-var-lib-cni-multus\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.193816 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.195878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/54a33b8e-b623-4f91-be1d-a38dfcef17d7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.200599 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.209703 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c77zh\" (UniqueName: \"kubernetes.io/projected/54a33b8e-b623-4f91-be1d-a38dfcef17d7-kube-api-access-c77zh\") pod \"multus-additional-cni-plugins-cvx8m\" (UID: \"54a33b8e-b623-4f91-be1d-a38dfcef17d7\") " pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.210313 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2kg5\" (UniqueName: \"kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5\") pod \"ovnkube-node-42vjg\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.212108 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ckjq\" (UniqueName: \"kubernetes.io/projected/3ac24bbf-c37a-4253-be71-8d8f15cfd48e-kube-api-access-8ckjq\") pod \"multus-jfbb6\" (UID: \"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\") " pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.256460 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.265297 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jfbb6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.273533 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" Sep 29 19:08:31 crc kubenswrapper[4779]: W0929 19:08:31.273868 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod476bc421_1113_455e_bcc8_e207e47dad19.slice/crio-fc6cad44dec707b4c690c13b38787c2573a0f8def6522559a2563f5595e30ac4 WatchSource:0}: Error finding container fc6cad44dec707b4c690c13b38787c2573a0f8def6522559a2563f5595e30ac4: Status 404 returned error can't find the container with id fc6cad44dec707b4c690c13b38787c2573a0f8def6522559a2563f5595e30ac4 Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.281050 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:31 crc kubenswrapper[4779]: W0929 19:08:31.302089 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod046df2ef_fb75_4d32_93e6_17b36af0a7c2.slice/crio-66a10a1da39ecac420aef1a9520c7fa31a9b94be01cf95454ef84d875a969b8d WatchSource:0}: Error finding container 66a10a1da39ecac420aef1a9520c7fa31a9b94be01cf95454ef84d875a969b8d: Status 404 returned error can't find the container with id 66a10a1da39ecac420aef1a9520c7fa31a9b94be01cf95454ef84d875a969b8d Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.495175 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.495419 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.495456 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.495477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495557 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:08:32.495531952 +0000 UTC m=+23.379957052 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.495608 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495615 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495658 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495714 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:32.495692527 +0000 UTC m=+23.380117627 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495730 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495746 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495746 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495767 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495769 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495809 4779 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:32.495788669 +0000 UTC m=+23.380213989 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495812 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495829 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:32.49582129 +0000 UTC m=+23.380246390 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.495876 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:32.495864041 +0000 UTC m=+23.380289331 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.769774 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.770383 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.771418 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.772151 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.772899 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.773491 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.774234 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.774937 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.775650 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.776198 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.776824 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.777575 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.778136 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.778708 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.779291 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.779869 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.783652 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.784205 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.784898 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.786074 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.786622 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.787253 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.788173 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.788852 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.789801 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.790505 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.791731 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.792229 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.793500 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.793974 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.794528 4779 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.795059 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.796844 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.797478 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.798471 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.800224 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.800969 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.802093 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.803112 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.804644 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.805340 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.806642 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.807438 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.808529 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.809045 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.810086 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.810918 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.812558 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.813154 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.814140 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.814798 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.815511 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.816617 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.817116 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.879710 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.879839 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.879866 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8d219d7a38f99b46bed91561879c8982cb67e9087e0c07f0761ee528ffa1a271"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.881399 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerStarted","Data":"2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.881442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerStarted","Data":"fe5502680b49ea47e64761a8953e7667371cdbf2d5a915b54ec73ab8312635cd"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.884201 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.886645 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"246bd1b3b20aff3b78ec13252b58c36ff009a26ba55dc3af5568a20e3b43d234"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.886679 4779 scope.go:117] "RemoveContainer" containerID="7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a" Sep 29 19:08:31 crc kubenswrapper[4779]: E0929 19:08:31.886972 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.888518 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-7hb2m" event={"ID":"45b89d12-bbd2-4b47-815d-a7421cc1aa00","Type":"ContainerStarted","Data":"002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.888547 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-7hb2m" event={"ID":"45b89d12-bbd2-4b47-815d-a7421cc1aa00","Type":"ContainerStarted","Data":"f46ad163a7a29ec535040914e1e1a527a870add565561f889a5c9ce694efde41"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.889132 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.890154 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c" exitCode=0 Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.890206 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.890223 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"66a10a1da39ecac420aef1a9520c7fa31a9b94be01cf95454ef84d875a969b8d"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.893402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.893445 4779 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.893459 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"fc6cad44dec707b4c690c13b38787c2573a0f8def6522559a2563f5595e30ac4"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.894987 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.895051 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d997424539c521cfc8d60f6aab7a98efcb7073d8a70b0eba1e5340d84c915aa8"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.897117 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e" exitCode=0 Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.897167 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.897199 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerStarted","Data":"daff37cc2bd573d2e50919abc74ca3757f575a9f2f0c006c3267d160f8956f92"} Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.909986 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.924461 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.939287 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.959565 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.974101 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8893063a3ed083fa640798760bb57a5eb0ed66c648d2d052eff803d7e077390d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:23Z\\\",\\\"message\\\":\\\"W0929 19:08:12.809143 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 19:08:12.811203 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759172892 cert, and key in /tmp/serving-cert-1445773094/serving-signer.crt, /tmp/serving-cert-1445773094/serving-signer.key\\\\nI0929 19:08:13.132775 1 observer_polling.go:159] Starting file observer\\\\nW0929 19:08:13.135435 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 19:08:13.135596 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:13.138018 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1445773094/tls.crt::/tmp/serving-cert-1445773094/tls.key\\\\\\\"\\\\nF0929 19:08:23.559817 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 
19:08:31 crc kubenswrapper[4779]: I0929 19:08:31.990113 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.006514 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.023056 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.034196 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.047103 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.062975 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.079855 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.101041 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.119129 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.131085 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.146042 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.161441 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.173538 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.187826 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.201656 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.217562 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.247345 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.267409 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc 
kubenswrapper[4779]: I0929 19:08:32.505255 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.505444 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505495 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:08:34.505462692 +0000 UTC m=+25.389887842 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.505565 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505613 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505653 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505669 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505736 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:34.505712718 +0000 UTC m=+25.390137928 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505744 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505754 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.505627 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505768 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505840 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:34.505814261 +0000 UTC m=+25.390239361 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505851 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.505873 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505894 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:34.505882693 +0000 UTC m=+25.390307993 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505961 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.505987 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:34.505978785 +0000 UTC m=+25.390403885 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.765462 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.765470 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.765503 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.765911 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.765940 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:32 crc kubenswrapper[4779]: E0929 19:08:32.765790 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.906981 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d" exitCode=0 Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.907075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.921405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.921500 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.921521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.921535 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.921547 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.925218 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.947266 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.982394 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:32 crc kubenswrapper[4779]: I0929 19:08:32.997994 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.013428 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.027228 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.042280 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.057111 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.069811 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.084851 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.103818 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.120289 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.850415 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-zxpg4"] Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.851210 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.853554 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.853804 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.854691 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.855372 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.874898 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.894708 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.928882 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.929888 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05" exitCode=0 Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.929969 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05"} Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.936687 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.946197 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.962422 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.982741 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:33 crc kubenswrapper[4779]: I0929 19:08:33.999589 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:33Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.017626 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.021009 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-host\") pod \"node-ca-zxpg4\" (UID: 
\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.021062 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-serviceca\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.021543 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ghv2\" (UniqueName: \"kubernetes.io/projected/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-kube-api-access-6ghv2\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.033559 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.051460 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.066867 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.080906 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.093607 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.107696 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.122290 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.122518 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-serviceca\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.122571 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-host\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.122626 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ghv2\" (UniqueName: \"kubernetes.io/projected/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-kube-api-access-6ghv2\") pod 
\"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.122860 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-host\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.124587 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-serviceca\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.133325 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name
\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.139453 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ghv2\" (UniqueName: \"kubernetes.io/projected/e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6-kube-api-access-6ghv2\") pod \"node-ca-zxpg4\" (UID: \"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\") " pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.149759 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.166779 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.172215 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-zxpg4" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.183186 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: W0929 19:08:34.186843 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0db9b28_7e3a_4d44_9e98_1a07c8e5b8d6.slice/crio-4a19fa7a25fa4a7eaf509057f248dd22ed0b368a54ae4b01a19f291103ac28e4 WatchSource:0}: Error finding container 4a19fa7a25fa4a7eaf509057f248dd22ed0b368a54ae4b01a19f291103ac28e4: Status 404 returned error can't find the container with id 4a19fa7a25fa4a7eaf509057f248dd22ed0b368a54ae4b01a19f291103ac28e4 Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.210908 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.223446 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.240134 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.251300 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.270673 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.286234 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.298931 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.305053 4779 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.312232 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.316718 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.319707 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.337378 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.352067 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.371960 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.388490 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.402882 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.416841 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.433747 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.448194 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.466301 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.479109 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.490228 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.505254 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.523984 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.526363 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.526503 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.526543 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.526576 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.526618 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526687 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:08:38.526651484 +0000 UTC m=+29.411076704 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526754 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526820 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:38.526800158 +0000 UTC m=+29.411225458 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526863 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526865 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526894 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526904 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526915 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526921 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526754 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.526979 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl 
podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:38.526960342 +0000 UTC m=+29.411385652 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.527004 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:38.526993063 +0000 UTC m=+29.411418393 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.527022 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:38.527013563 +0000 UTC m=+29.411438913 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.543694 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.564691 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.585026 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.604773 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.624623 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.642003 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.674003 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.689936 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.703009 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.719663 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.736183 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.754347 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.765958 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.765954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.765963 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.766241 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.766391 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:34 crc kubenswrapper[4779]: E0929 19:08:34.766526 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.771341 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.946174 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784" exitCode=0 Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.946294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784"} Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.949143 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zxpg4" event={"ID":"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6","Type":"ContainerStarted","Data":"2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf"} Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.949235 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zxpg4" event={"ID":"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6","Type":"ContainerStarted","Data":"4a19fa7a25fa4a7eaf509057f248dd22ed0b368a54ae4b01a19f291103ac28e4"} Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.951302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794"} Sep 29 19:08:34 crc kubenswrapper[4779]: I0929 19:08:34.978116 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:34Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.008397 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.046650 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.079691 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd
27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.099625 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.112564 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.125591 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.138356 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.153921 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.170822 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.190692 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.204156 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.219805 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.232087 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.246768 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.260853 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.280400 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.295463 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.308914 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.323128 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10
ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.335286 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.346513 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: 
I0929 19:08:35.360187 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\"
:\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.374555 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.390013 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.406578 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.431019 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.463215 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.749572 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.752488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.752703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.752846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.753066 4779 kubelet_node_status.go:76] "Attempting to 
register node" node="crc" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.764297 4779 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.764585 4779 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.765892 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.765950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.765968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.765990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.766007 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.781522 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.786097 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.786188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.786207 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.786232 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.786251 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.801820 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.809757 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.809914 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.810000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.810137 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.810225 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.831022 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.835032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.835268 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.835460 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.835587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.835716 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.855046 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.858944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.859120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
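Every failed patch in this stretch of the log dies on the same TLS handshake error: the serving certificate behind the node.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-09-29. A minimal, self-contained Go sketch of the validity-window check that yields the "x509: certificate has expired or is not yet valid" message follows; the certificate path is an illustrative assumption, not a path taken from this log.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical location of the webhook serving certificate; adjust as needed.
	data, err := os.ReadFile("/var/run/secrets/webhook-serving/tls.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// crypto/x509 rejects a chain whenever "now" falls outside
	// [NotBefore, NotAfter]; the log's "current time ... is after ..."
	// detail reports exactly this comparison.
	now := time.Now().UTC()
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Printf("certificate valid until %s\n", cert.NotAfter.Format(time.RFC3339))
}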
event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.859249 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.859444 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.859540 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.874681 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:35 crc kubenswrapper[4779]: E0929 19:08:35.874809 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.876129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
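The "Unable to update node status" err="update node status exceeds retry count" entry above is the kubelet giving up after a fixed number of consecutive patch attempts (the upstream kubelet caps these with a constant named nodeStatusUpdateRetry, historically 5) and deferring to the next sync period. The sketch below illustrates that bounded-retry shape only; it is not kubelet source, and the stubbed patch call stands in for the PATCH the webhook keeps rejecting.

package main

import (
	"errors"
	"fmt"
)

// Mirrors the upstream kubelet constant of the same name (value 5 there).
const nodeStatusUpdateRetry = 5

// patchNodeStatus is a stand-in for the API PATCH that the admission
// webhook rejects above with an expired-certificate error.
func patchNodeStatus() error {
	return errors.New("tls: failed to verify certificate: x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Printf("Unable to update node status: %v\n", err)
	}
}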
event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.876153 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.876163 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.876178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.876189 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.959265 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.962692 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f" exitCode=0 Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.962761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f"} Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.978764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.979046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.979135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.979230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.979350 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.979350 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:35Z","lastTransitionTime":"2025-09-29T19:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:35 crc kubenswrapper[4779]: I0929 19:08:35.983141 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:35Z is after 2025-08-24T17:21:41Z"
Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.006882 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status:
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.028149 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.050842 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.066891 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.080456 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.081906 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.081953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.081993 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.082011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.082024 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.098217 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.117166 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.133610 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.149505 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.162760 4779 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.177177 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.184819 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.184849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.184858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.184878 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.184889 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.196071 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.224099 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\
":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.287827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.287886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.287897 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.287920 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.287935 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.391596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.391652 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.391664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.391689 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.391701 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.495470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.495558 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.495582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.495611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.495633 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.599395 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.599464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.599478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.599503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.599519 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.702605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.702671 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.702690 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.702711 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.702723 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.766345 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.766400 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.766345 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:36 crc kubenswrapper[4779]: E0929 19:08:36.766486 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:36 crc kubenswrapper[4779]: E0929 19:08:36.766539 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:36 crc kubenswrapper[4779]: E0929 19:08:36.766612 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.806377 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.806427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.806437 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.806456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.806471 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.909430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.909500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.909522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.909548 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.909571 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:36Z","lastTransitionTime":"2025-09-29T19:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.970547 4779 generic.go:334] "Generic (PLEG): container finished" podID="54a33b8e-b623-4f91-be1d-a38dfcef17d7" containerID="3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6" exitCode=0 Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.970704 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerDied","Data":"3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6"} Sep 29 19:08:36 crc kubenswrapper[4779]: I0929 19:08:36.988638 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:36Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.004258 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.011818 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.011885 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.011901 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.011924 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.011940 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.019501 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.037958 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.053230 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.068795 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10
ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.080872 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.091971 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: 
I0929 19:08:37.104221 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\"
:\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.114373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.114419 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.114431 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.114449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.114461 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.118716 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.132781 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.144635 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.165918 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z 
is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.178966 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.216695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.216747 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.216761 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.216779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.216790 4779 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.318969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.319004 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.319013 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.319026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.319035 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.421903 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.421949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.421965 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.421986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.422004 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.524423 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.524477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.524486 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.524502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.524513 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.626968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.627037 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.627056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.627082 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.627101 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.729426 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.729462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.729471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.729487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.729497 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.832371 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.832405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.832427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.832442 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.832454 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.934582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.934646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.934669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.934698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.934720 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:37Z","lastTransitionTime":"2025-09-29T19:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.978124 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" event={"ID":"54a33b8e-b623-4f91-be1d-a38dfcef17d7","Type":"ContainerStarted","Data":"49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.984095 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f"} Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.984477 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.984555 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:37 crc kubenswrapper[4779]: I0929 19:08:37.996717 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.012642 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/stat
ic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.028350 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.034409 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.039014 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.037838 4779 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.039138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.039155 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.039639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.039694 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.042079 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.056258 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.070133 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.084292 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.101828 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.115880 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.126556 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.139806 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.143982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.144039 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.144057 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.144078 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.144093 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.151045 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.165071 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.178074 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.190939 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10
ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.202472 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.213916 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: 
I0929 19:08:38.227706 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\"
:\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.244419 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-
release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated
\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.246267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.246352 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.246364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.246382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.246391 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.257544 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.270977 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.288707 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af7623389
19293e6d3d2e79a95649bd1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.300157 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.312267 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.321876 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.333233 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.346956 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.348335 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.348361 4779 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.348371 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.348388 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.348399 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.359215 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tr
ue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:38Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.450415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.450478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.450497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.450521 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.450538 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.553289 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.553400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.553420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.553448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.553467 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.572747 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.572884 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.572862679 +0000 UTC m=+37.457287779 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.572971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.573013 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.573047 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573091 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573108 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573120 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573154 4779 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.573146546 +0000 UTC m=+37.457571646 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573191 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573259 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.573236939 +0000 UTC m=+37.457662079 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573403 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.573103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573469 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.573449844 +0000 UTC m=+37.457875014 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573531 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573543 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573552 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.573589 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.573581368 +0000 UTC m=+37.458006468 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.600168 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.601143 4779 scope.go:117] "RemoveContainer" containerID="7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.601346 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.656643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.656710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.656721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.656772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc 
kubenswrapper[4779]: I0929 19:08:38.656793 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.759529 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.759591 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.759618 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.759648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.759688 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.765900 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.765978 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.765920 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.766071 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.766183 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:38 crc kubenswrapper[4779]: E0929 19:08:38.766372 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.862565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.862624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.862640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.862662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.862679 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.965060 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.965121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.965138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.965163 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.965180 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:38Z","lastTransitionTime":"2025-09-29T19:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:38 crc kubenswrapper[4779]: I0929 19:08:38.987868 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.068886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.068964 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.068982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.069005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.069023 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.172494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.172606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.172623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.172648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.172666 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.275560 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.275611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.275628 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.275649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.275666 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.378807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.378858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.378871 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.378889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.378901 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.482195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.482252 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.482270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.482295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.482313 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.586423 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.586479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.586489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.586504 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.586516 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.688528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.688568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.688582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.688602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.688616 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.785682 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.790220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.790255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.790265 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.790279 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.790288 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.802166 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.822265 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.848133 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10
ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.865256 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.876401 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: 
I0929 19:08:39.890257 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\"
:\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.892398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.892456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.892469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.892488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.892500 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.902575 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.915595 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.927572 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.947460 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af7623389
19293e6d3d2e79a95649bd1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.958765 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.968758 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.980543 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:39Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.992624 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.996654 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.996693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.996703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.996719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:39 crc kubenswrapper[4779]: I0929 19:08:39.996729 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:39Z","lastTransitionTime":"2025-09-29T19:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.098715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.098768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.098797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.098817 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.098827 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.201738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.201780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.201792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.201809 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.201822 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.304952 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.305009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.305025 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.305048 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.305064 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.407897 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.407972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.407982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.407997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.408010 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.510947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.510997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.511008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.511023 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.511033 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.614425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.614493 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.614510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.614537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.614555 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.718077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.718127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.718139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.718158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.718186 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.765902 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.765990 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:40 crc kubenswrapper[4779]: E0929 19:08:40.766049 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.765913 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:40 crc kubenswrapper[4779]: E0929 19:08:40.766167 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:40 crc kubenswrapper[4779]: E0929 19:08:40.766264 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.820872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.820949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.820966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.821307 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.821372 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.923918 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.923975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.923991 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.924013 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.924029 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:40Z","lastTransitionTime":"2025-09-29T19:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:40 crc kubenswrapper[4779]: I0929 19:08:40.999463 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/0.log" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.003345 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f" exitCode=1 Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.003417 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.004666 4779 scope.go:117] "RemoveContainer" containerID="6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.018667 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.026861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.026901 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.026915 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.026934 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.026946 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.038662 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.054114 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.069797 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.085843 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.103525 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.116303 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.129171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.129224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.129241 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.129265 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.129282 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.134976 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.147090 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.158564 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.194583 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.228674 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.231485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.231513 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.231550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.231593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.231626 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.262534 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.272847 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.333555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.333611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.333624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.333644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.333657 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.435720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.435780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.435797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.435843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.435865 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.537891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.537945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.537955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.537969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.537978 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.640255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.640285 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.640294 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.640306 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.640334 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.742338 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.742376 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.742385 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.742398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.742408 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.844833 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.844886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.844903 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.844925 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.844940 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.947361 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.947403 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.947413 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.947426 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:41 crc kubenswrapper[4779]: I0929 19:08:41.947436 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:41Z","lastTransitionTime":"2025-09-29T19:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.008666 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/1.log" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.009940 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/0.log" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.013149 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba" exitCode=1 Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.013197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.013243 4779 scope.go:117] "RemoveContainer" containerID="6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.015971 4779 scope.go:117] "RemoveContainer" containerID="418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba" Sep 29 19:08:42 crc kubenswrapper[4779]: E0929 19:08:42.016272 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.030032 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.047521 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.049477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.049510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.049523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.049539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.049552 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.062164 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.078065 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.094227 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.109459 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.121683 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.142075 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved 
from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.152055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.152096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.152111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.152129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.152141 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.153859 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.165776 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.178170 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.192595 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.210856 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.224127 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:42Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.255156 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.255194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.255205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.255224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.255235 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.358471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.358810 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.358920 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.359051 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.359165 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.462720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.463117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.463271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.463478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.463620 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.567479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.567805 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.568027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.568228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.568399 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.673227 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.673354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.673402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.673447 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.673470 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.765387 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.765387 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:42 crc kubenswrapper[4779]: E0929 19:08:42.765588 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:42 crc kubenswrapper[4779]: E0929 19:08:42.765908 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.765410 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:42 crc kubenswrapper[4779]: E0929 19:08:42.766023 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.777543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.777623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.777644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.777672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.777693 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.880732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.881141 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.881210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.881277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.881403 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.984218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.984262 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.984272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.984290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:42 crc kubenswrapper[4779]: I0929 19:08:42.984300 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:42Z","lastTransitionTime":"2025-09-29T19:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.018350 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/1.log" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.087136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.087203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.087220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.087246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.087262 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.190547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.190627 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.190640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.190659 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.190672 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.293501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.293556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.293573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.293598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.293617 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.397117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.397176 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.397193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.397216 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.397233 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.500365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.500430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.500455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.500484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.500502 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.603281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.603356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.603372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.603390 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.603401 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.705846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.705886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.705894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.705910 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.705921 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.808067 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.808116 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.808129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.808145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.808156 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.911199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.911272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.911291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.911351 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.911371 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:43Z","lastTransitionTime":"2025-09-29T19:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.989633 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc"] Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.990071 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.993519 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 19:08:43 crc kubenswrapper[4779]: I0929 19:08:43.995171 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.011540 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.014282 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.014405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.014433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.014466 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.014493 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.030015 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.044065 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cxcv\" (UniqueName: 
\"kubernetes.io/projected/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-kube-api-access-7cxcv\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.044220 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.044277 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.044372 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.047562 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.061676 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.076689 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c121
08ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"qua
y.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.093007 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.109310 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.116155 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.116359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.116381 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.116395 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.116409 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.142304 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] 
[]},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.145662 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.145909 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.146228 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cxcv\" (UniqueName: \"kubernetes.io/projected/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-kube-api-access-7cxcv\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.146464 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.146249 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.146828 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.153089 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.156406 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.166199 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cxcv\" (UniqueName: \"kubernetes.io/projected/97c4781c-2d4b-4eab-96fb-39a342c2d4a0-kube-api-access-7cxcv\") pod \"ovnkube-control-plane-749d76644c-fvdtc\" (UID: \"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.170310 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.187264 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.203038 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.223463 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.223507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.223522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.223543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.223559 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.225723 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.248923 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.262204 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.306606 4779 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.326355 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.326407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.326422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.326444 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.326460 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:44 crc kubenswrapper[4779]: W0929 19:08:44.327165 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97c4781c_2d4b_4eab_96fb_39a342c2d4a0.slice/crio-8453bdaee3e644210a2c5ef10df8a01e689172c36fdc595fa11078b46001dd3e WatchSource:0}: Error finding container 8453bdaee3e644210a2c5ef10df8a01e689172c36fdc595fa11078b46001dd3e: Status 404 returned error can't find the container with id 8453bdaee3e644210a2c5ef10df8a01e689172c36fdc595fa11078b46001dd3e
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.429092 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.429142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.429157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.429173 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.429185 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.531703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.531748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.531760 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.531776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.531786 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.634681 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.634740 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.634757 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.634781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.634798 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739137 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-2rtwf"]
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739667 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.739873 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.739966 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.752998 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.753092 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7sr8\" (UniqueName: \"kubernetes.io/projected/4df079c4-34e3-4132-91bb-ad68488552f8-kube-api-access-s7sr8\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.756432 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.765794 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.765837 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.765884 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.766186 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.766469 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.766582 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.778132 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.805655 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.828398 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.842972 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.843009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.843021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.843041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.843053 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.846369 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.854017 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7sr8\" (UniqueName: \"kubernetes.io/projected/4df079c4-34e3-4132-91bb-ad68488552f8-kube-api-access-s7sr8\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.854087 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.854194 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:44 crc kubenswrapper[4779]: E0929 19:08:44.854248 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:45.354231499 +0000 UTC m=+36.238656609 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.867628 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.882376 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.888950 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7sr8\" (UniqueName: \"kubernetes.io/projected/4df079c4-34e3-4132-91bb-ad68488552f8-kube-api-access-s7sr8\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.897200 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.913609 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.925307 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.942797 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.945830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.946019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.946164 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.946349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.946481 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:44Z","lastTransitionTime":"2025-09-29T19:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.959574 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:44 crc kubenswrapper[4779]: I0929 19:08:44.996014 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\
\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] 
Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log
\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:44Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.008841 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.024480 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.030562 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" event={"ID":"97c4781c-2d4b-4eab-96fb-39a342c2d4a0","Type":"ContainerStarted","Data":"3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.030622 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" event={"ID":"97c4781c-2d4b-4eab-96fb-39a342c2d4a0","Type":"ContainerStarted","Data":"c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.030675 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" event={"ID":"97c4781c-2d4b-4eab-96fb-39a342c2d4a0","Type":"ContainerStarted","Data":"8453bdaee3e644210a2c5ef10df8a01e689172c36fdc595fa11078b46001dd3e"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.040820 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.049002 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc 
kubenswrapper[4779]: I0929 19:08:45.049032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.049040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.049052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.049071 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.054934 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.066708 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.088965 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved 
from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.099629 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.109718 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.120417 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.130243 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 
19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.142898 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.151804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.152035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.152123 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.152191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.152252 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.156790 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.174197 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.190176 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.215036 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.231661 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.247509 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.254494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.254618 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.254676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.254742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.254811 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.261166 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.274940 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:45Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.357046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.357083 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.357096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.357110 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.357128 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.359004 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:45 crc kubenswrapper[4779]: E0929 19:08:45.359177 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:45 crc kubenswrapper[4779]: E0929 19:08:45.359252 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:46.359232463 +0000 UTC m=+37.243657563 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.460180 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.460226 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.460238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.460259 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.460274 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.563258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.563359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.563385 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.563414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.563436 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.666889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.666947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.666965 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.666990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.667008 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.770861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.770970 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.771007 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.771055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.771082 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.875832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.876544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.876557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.876575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.876586 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.980038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.980103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.980120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.980146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:45 crc kubenswrapper[4779]: I0929 19:08:45.980164 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:45Z","lastTransitionTime":"2025-09-29T19:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.083228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.083261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.083270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.083302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.083333 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.185762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.185835 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.185846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.185865 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.185876 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.227052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.227117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.227135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.227161 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.227180 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.246206 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:46Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.251815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.251891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.251916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.251943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.251965 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.276887 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:46Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.282265 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.282328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.282343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.282368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.282387 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.304405 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:46Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.310520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.310587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.310602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.310624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.310635 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.328634 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:46Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.332937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.332984 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.332996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.333016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.333027 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.351045 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:46Z is after 
2025-08-24T17:21:41Z" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.351594 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.354193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.354362 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.354463 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.354579 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.354687 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.369159 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.369466 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.369577 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:48.369548542 +0000 UTC m=+39.253973682 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.457593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.457657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.457679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.457707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.457740 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.560079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.560123 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.560132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.560145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.560155 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.662517 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.662578 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.662615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.662648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.662674 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.673297 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.673646 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:09:02.673566374 +0000 UTC m=+53.557991494 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.673761 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.673958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.674007 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.674069 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674431 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674460 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674512 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674552 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674562 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674573 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674591 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:02.67456036 +0000 UTC m=+53.558985650 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674476 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674644 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674649 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:02.674621652 +0000 UTC m=+53.559046792 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674694 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:02.674674163 +0000 UTC m=+53.559099493 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.674730 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:02.674713904 +0000 UTC m=+53.559139214 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.765720 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.765764 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.765726 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.765731 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.765900 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.766058 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.766116 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:46 crc kubenswrapper[4779]: E0929 19:08:46.766243 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.766296 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.766338 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.766347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.766360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.766368 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.869822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.869880 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.869898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.869924 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.869942 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.973120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.973188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.973205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.973234 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:46 crc kubenswrapper[4779]: I0929 19:08:46.973252 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:46Z","lastTransitionTime":"2025-09-29T19:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.076397 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.076481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.076510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.076542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.076566 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.179873 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.179942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.179968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.179997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.180020 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.283255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.283312 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.283342 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.283359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.283371 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.387000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.387215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.387237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.387262 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.387282 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.490534 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.490627 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.490641 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.490670 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.490686 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.593903 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.593979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.594004 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.594034 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.594058 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.697407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.697530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.697548 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.697574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.697591 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.800537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.800604 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.800635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.800660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.800677 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.904705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.904762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.904776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.904801 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:47 crc kubenswrapper[4779]: I0929 19:08:47.904818 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:47Z","lastTransitionTime":"2025-09-29T19:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.007744 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.007787 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.007796 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.007810 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.007819 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.111127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.111235 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.111257 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.111287 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.111307 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.214142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.214188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.214197 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.214213 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.214224 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.317570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.317652 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.317676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.317713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.317748 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.394557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.394685 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.394751 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:08:52.394734189 +0000 UTC m=+43.279159289 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.421674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.421734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.421746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.421768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.421784 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.525063 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.525118 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.525134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.525159 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.525180 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.628182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.628302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.628359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.628391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.628409 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.731531 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.731607 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.731623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.731646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.731659 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.765753 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.765791 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.765960 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.765990 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.766053 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.766185 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.766279 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:08:48 crc kubenswrapper[4779]: E0929 19:08:48.766409 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.834645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.834685 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.834696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.834711 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.834722 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.937707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.937797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.937822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.937852 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:48 crc kubenswrapper[4779]: I0929 19:08:48.937874 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:48Z","lastTransitionTime":"2025-09-29T19:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.041454 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.041511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.041528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.041550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.041567 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.144449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.144515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.144528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.144552 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.144569 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.247855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.247917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.247931 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.247953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.247969 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.350788 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.350923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.350945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.350975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.351000 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.454483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.454550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.454562 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.454582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.454594 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.558514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.558606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.558639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.558674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.558696 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.661642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.661708 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.661720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.661737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.661749 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.764162 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.764248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.764259 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.764275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.764289 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.787139 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.805199 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.824877 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.844057 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867341 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867357 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.867165 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:
08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.882482 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.897598 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.923804 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b07760e693cb3b51af82207169b6c5af762338919293e6d3d2e79a95649bd1f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:40Z\\\",\\\"message\\\":\\\"9 19:08:40.383515 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 19:08:40.383552 6050 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 19:08:40.383581 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 19:08:40.383612 6050 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 19:08:40.383640 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 19:08:40.383646 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 19:08:40.383677 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 19:08:40.383692 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 19:08:40.383702 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 19:08:40.383712 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 19:08:40.383721 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 19:08:40.383731 6050 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 19:08:40.383740 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 19:08:40.383750 6050 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 19:08:40.383759 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 19:08:40.383870 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved 
from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.940410 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.962083 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.970219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.970279 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.970298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.970350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.970370 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:49Z","lastTransitionTime":"2025-09-29T19:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.982206 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:49 crc kubenswrapper[4779]: I0929 19:08:49.999305 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:49Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.020203 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.039854 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.060888 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.073833 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.073909 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.073931 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.073953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.073968 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:50Z","lastTransitionTime":"2025-09-29T19:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.076929 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.177684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.177744 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.177765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.177788 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.177809 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:50Z","lastTransitionTime":"2025-09-29T19:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.195042 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.196363 4779 scope.go:117] "RemoveContainer" containerID="418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba" Sep 29 19:08:50 crc kubenswrapper[4779]: E0929 19:08:50.196677 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.217408 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.232969 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.247990 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.265477 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.280127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.280185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.280203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.280225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.280243 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:50Z","lastTransitionTime":"2025-09-29T19:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.286263 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:
08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.301163 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.318158 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.348535 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.362493 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.379114 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.383838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.383899 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.383913 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.383937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.383950 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:50Z","lastTransitionTime":"2025-09-29T19:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.394351 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.408961 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.421528 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.436129 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.450092 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.464310 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:50Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.486742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.486774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 
19:08:50.486786 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.486806 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.486818 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:50Z","lastTransitionTime":"2025-09-29T19:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The five-entry block above (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats verbatim at roughly 100 ms intervals from 19:08:50.590 through 19:08:54.824, changing only timestamps; the duplicate blocks are omitted below.]
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.766038 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.766138 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.766060 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:08:50 crc kubenswrapper[4779]: E0929 19:08:50.766242 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:08:50 crc kubenswrapper[4779]: E0929 19:08:50.766391 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:08:50 crc kubenswrapper[4779]: E0929 19:08:50.766488 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:08:50 crc kubenswrapper[4779]: I0929 19:08:50.766706 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:08:50 crc kubenswrapper[4779]: E0929 19:08:50.766794 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
[The same four-pod "No sandbox for pod can be found" / "Error syncing pod, skipping" group recurs, with only sub-second timestamps differing, at 19:08:52.766 and 19:08:54.766; the duplicates are omitted below.]
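The three status-patch failures above share one root cause: every kubelet status PATCH is rejected by the pod.network-node-identity.openshift.io admission webhook because its serving certificate expired on 2025-08-24, well before the log's current time of 2025-09-29T19:08:50Z. A minimal sketch of how one might confirm the certificate window from the node, assuming Python with the third-party cryptography package is installed and that the webhook endpoint 127.0.0.1:9743 named in the log is reachable:

import ssl, socket
from datetime import datetime, timezone
from cryptography import x509  # third-party: pip install cryptography

HOST, PORT = "127.0.0.1", 9743  # webhook endpoint taken from the log above

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE  # inspect the cert without trusting it
with socket.create_connection((HOST, PORT), timeout=5) as sock:
    with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
        der = tls.getpeercert(binary_form=True)  # DER bytes even with CERT_NONE

cert = x509.load_der_x509_certificate(der)
now = datetime.now(timezone.utc)
# on cryptography < 42, use cert.not_valid_before / cert.not_valid_after instead
print("notBefore:", cert.not_valid_before_utc)
print("notAfter: ", cert.not_valid_after_utc)
print("expired:  ", now > cert.not_valid_after_utc)

Against the state recorded in this log, such a check would be expected to print a notAfter of 2025-08-24T17:21:41Z and expired: True.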
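Throughout this window the node stays NotReady solely because the container runtime reports no CNI configuration. A minimal sketch of the readiness check the runtime is effectively performing, assuming it looks for .conf/.conflist/.json files in the directory named in the log (run on the node itself):

import os, sys

CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"  # path taken from the log message
try:
    confs = [f for f in os.listdir(CNI_CONF_DIR)
             if f.endswith((".conf", ".conflist", ".json"))]
except FileNotFoundError:
    confs = []

if confs:
    print("CNI config present:", confs)
else:
    sys.exit(f"no CNI configuration file in {CNI_CONF_DIR} -- "
             "network plugin not ready")

Until the network operator (itself blocked on the expired webhook certificate above) writes a config into that directory, NetworkReady stays false and every pod sandbox creation is skipped.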
Sep 29 19:08:52 crc kubenswrapper[4779]: I0929 19:08:52.438808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:08:52 crc kubenswrapper[4779]: E0929 19:08:52.438983 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 19:08:52 crc kubenswrapper[4779]: E0929 19:08:52.439073 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:00.439056234 +0000 UTC m=+51.323481334 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered
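The "durationBeforeRetry 8s" above reflects the exponential backoff the kubelet applies to failed volume operations. A small illustrative sketch, assuming a 0.5 s base delay, a factor of 2, and a roughly two-minute cap (illustrative values, not exact kubelet constants), under which an 8 s wait follows the fifth consecutive failure:

# Illustrative exponential backoff of the kind kubelet applies to failed
# volume operations (durationBeforeRetry); base/factor/cap are assumptions.
def backoff_delays(base=0.5, factor=2.0, cap=120.0, attempts=10):
    delay = base
    for _ in range(attempts):
        yield delay
        delay = min(delay * factor, cap)

print([round(d, 1) for d in backoff_delays()])
# [0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 120.0, 120.0]
# the fifth delay is 8.0 s, matching "durationBeforeRetry 8s" in the log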
Sep 29 19:08:53 crc kubenswrapper[4779]: I0929 19:08:53.766585 4779 scope.go:117] "RemoveContainer" containerID="7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a"
Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.066077 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Has your network provider started?"} Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.928079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.928158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.928182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.928734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:54 crc kubenswrapper[4779]: I0929 19:08:54.929453 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:54Z","lastTransitionTime":"2025-09-29T19:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.032569 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.032634 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.032650 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.032673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.032691 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.073076 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.074988 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.075336 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.091874 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265
342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.108652 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.125034 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.136006 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.136052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.136062 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.136083 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.136097 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.142770 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.158184 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.175303 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d74
62\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.190741 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.217753 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.228456 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.238761 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.238798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.238807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.238822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.238832 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.240121 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.254005 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.267662 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 
19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.283691 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.301993 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.313765 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.331309 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:55Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.341883 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.341937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 
19:08:55.341952 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.341976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.341990 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.444904 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.444947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.444959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.444976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.444986 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.548421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.548463 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.548472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.548487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.548498 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.651725 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.651777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.651789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.651810 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.651824 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.754916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.754967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.754991 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.755016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.755032 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.858039 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.858108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.858126 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.858154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.858171 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.960841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.960894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.960908 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.960929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:55 crc kubenswrapper[4779]: I0929 19:08:55.960946 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:55Z","lastTransitionTime":"2025-09-29T19:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.063665 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.063710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.063720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.063737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.063747 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.166684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.166742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.166752 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.166769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.166781 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.270201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.270269 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.270281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.270302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.270339 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.354471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.354526 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.354538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.354555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.354565 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.372691 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:56Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.377054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.377085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.377093 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.377109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.377119 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.395917 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:56Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.401576 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.401643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.401666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.401691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.401710 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.417567 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:56Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.422137 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.422178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.422193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.422214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.422228 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.437440 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:56Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.442567 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.442659 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.442680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.442700 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.442712 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.458849 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:56Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.459101 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.460844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.460893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.460909 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.460935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.460951 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.563764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.563804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.563812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.563829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.563839 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.665692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.665729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.665737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.665750 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.665760 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.765188 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.765247 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.765203 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.765363 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.765529 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.765633 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.765717 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:56 crc kubenswrapper[4779]: E0929 19:08:56.765857 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.767769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.767809 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.767819 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.767835 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.767847 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.870170 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.870250 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.870263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.870280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.870294 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.972494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.972553 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.972572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.972596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:56 crc kubenswrapper[4779]: I0929 19:08:56.972612 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:56Z","lastTransitionTime":"2025-09-29T19:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.075310 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.075417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.075434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.075453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.075466 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.177857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.177940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.177953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.177975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.177987 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.279776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.279831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.279847 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.279870 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.279887 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.383257 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.383408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.383437 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.383461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.383481 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.485468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.485530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.485568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.485593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.485610 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.587944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.588017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.588038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.588065 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.588084 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.697902 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.697947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.697956 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.697968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.697983 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.801462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.801545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.801568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.801600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.801625 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.904227 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.904277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.904291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.904308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:57 crc kubenswrapper[4779]: I0929 19:08:57.904343 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:57Z","lastTransitionTime":"2025-09-29T19:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.007283 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.007385 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.007398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.007422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.007436 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.111168 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.111225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.111236 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.111256 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.111269 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.214607 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.214674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.214691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.214714 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.214728 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.317552 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.317626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.317640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.317660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.317680 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.420300 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.420409 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.420422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.420442 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.420456 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.523644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.523724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.523750 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.523779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.523801 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.626253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.626337 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.626354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.626370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.626405 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.729630 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.729677 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.729688 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.729705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.729718 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.765878 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.765901 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.766043 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:08:58 crc kubenswrapper[4779]: E0929 19:08:58.766069 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:08:58 crc kubenswrapper[4779]: E0929 19:08:58.766181 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.766541 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:08:58 crc kubenswrapper[4779]: E0929 19:08:58.766649 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:08:58 crc kubenswrapper[4779]: E0929 19:08:58.766741 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.832653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.832715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.832731 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.832756 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.832780 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.935692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.935748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.935765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.935788 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:58 crc kubenswrapper[4779]: I0929 19:08:58.935802 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:58Z","lastTransitionTime":"2025-09-29T19:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.039191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.039270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.039284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.039346 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.039364 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.143604 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.143695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.143720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.143752 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.143781 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.246702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.246762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.246774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.246795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.246808 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.350600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.350656 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.350668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.350691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.350704 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.454194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.454264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.454281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.454308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.454356 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.557723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.557777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.557789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.557808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.557820 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.661169 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.661203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.661212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.661225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.661233 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.764389 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.764439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.764449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.764465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.764474 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.778414 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.792226 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.808889 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 
19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.826021 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.842270 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.865393 4779 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.866808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.866873 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.866887 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.866911 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.866925 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.882005 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.898275 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.914942 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.932304 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.947393 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.964050 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.969290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.969340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.969350 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.969364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.969374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:08:59Z","lastTransitionTime":"2025-09-29T19:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:08:59 crc kubenswrapper[4779]: I0929 19:08:59.990996 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ec
e2ccb58d61be58b2989235ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:59Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.006722 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:00Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.024589 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete 
has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:00Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.042388 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:00Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.072171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.072211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.072226 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.072246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.072261 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.175185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.175494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.175636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.175875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.175997 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.278203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.278233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.278244 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.278256 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.278265 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.381962 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.382026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.382043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.382115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.382136 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
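
All of the NodeNotReady churn above reduces to one missing artifact: the container runtime reports NetworkReady=false until the network plugin (OVN-Kubernetes on this cluster) writes a CNI config file into /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of directory probe follows; it is illustrative only, not kubelet's actual implementation.

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // cniConfigPresent reports whether dir contains at least one CNI
    // config file (.conf, .conflist, or .json), roughly the condition
    // the runtime needs before it can report NetworkReady=true.
    func cniConfigPresent(dir string) bool {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false // a missing directory counts as "no config"
    	}
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true
    		}
    	}
    	return false
    }

    func main() {
    	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
    	fmt.Printf("CNI config present in %s: %v\n", dir, cniConfigPresent(dir))
    }

Once ovnkube-node writes its conflist there, the runtime flips NetworkReady and the repeated KubeletNotReady conditions below stop.
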
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.463938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.464113 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.464189 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:16.464167235 +0000 UTC m=+67.348592365 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.485447 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.485487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.485496 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.485510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.485519 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
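
Note the retry scheduling in the metrics-certs failure above: the first MountVolume.SetUp failure is deferred 16s, while the volume failures later in this window (at 19:09:02) are already deferred 32s, consistent with a doubling backoff on repeated volume operations. A generic sketch of that policy, with illustrative constants rather than kubelet's exact parameters:

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextDelay doubles the previous retry delay up to a ceiling, the
    // exponential-backoff shape suggested by the 16s -> 32s progression
    // in the log. The initial delay and ceiling here are assumptions.
    func nextDelay(prev time.Duration) time.Duration {
    	const initial = 500 * time.Millisecond
    	const maxDelay = 2 * time.Minute
    	if prev <= 0 {
    		return initial
    	}
    	d := prev * 2
    	if d > maxDelay {
    		d = maxDelay
    	}
    	return d
    }

    func main() {
    	var d time.Duration
    	for i := 0; i < 10; i++ {
    		d = nextDelay(d)
    		fmt.Printf("retry %d after %v\n", i+1, d)
    	}
    }

The backoff means that even after the underlying secret becomes available, the mount may not be reattempted until the printed deadline ("No retries permitted until ...") passes.
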
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.588400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.588485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.588503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.588557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.588571 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.691402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.691467 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.691484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.691512 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.691529 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.765873 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.765932 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.765993 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.766071 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.766117 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.766359 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.766475 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:00 crc kubenswrapper[4779]: E0929 19:09:00.767004 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.767606 4779 scope.go:117] "RemoveContainer" containerID="418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.793900 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.794505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.794518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.794538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.794551 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.897332 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.897386 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.897397 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.897417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:00 crc kubenswrapper[4779]: I0929 19:09:00.897432 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:00Z","lastTransitionTime":"2025-09-29T19:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five-entry node-status block (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats at roughly 100 ms intervals from 19:09:01.000544 through 19:09:02.551111; 16 intermediate repetitions elided ...]
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.653855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.653925 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.653943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.653969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.653991 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:02Z","lastTransitionTime":"2025-09-29T19:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
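
The condition payload in each setters.go entry is a serialized core/v1 NodeCondition. A dependency-free stand-in (the real type lives in k8s.io/api/core/v1; this mirrors only the fields visible in the log) reproduces the shape kubelet emits:

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // NodeCondition mirrors the fields printed in the "Node became not
    // ready" entries; the canonical definition is v1.NodeCondition.
    type NodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	c := NodeCondition{
    		Type:               "Ready",
    		Status:             "False",
    		LastHeartbeatTime:  "2025-09-29T19:09:02Z",
    		LastTransitionTime: "2025-09-29T19:09:02Z",
    		Reason:             "KubeletNotReady",
    		Message:            "container runtime network not ready: NetworkReady=false (message truncated here for illustration)",
    	}
    	b, _ := json.Marshal(c)
    	fmt.Println(string(b))
    }

lastHeartbeatTime advances with every block while the node stays NotReady; only a Ready flip would change lastTransitionTime going forward.
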
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.690042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.690250 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.690298 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.690412 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690459 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:09:34.690412098 +0000 UTC m=+85.574837248 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
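
The TearDown failure above means the kubevirt.io.hostpath-provisioner CSI driver had not yet re-registered with kubelet after the restart: unmounts need the driver's node plugin, and drivers announce themselves by placing a registration socket in kubelet's plugin-registry directory, which kubelet watches. A hedged sketch that lists what is currently registered, assuming the common default path /var/lib/kubelet/plugins_registry:

    package main

    import (
    	"fmt"
    	"os"
    )

    func main() {
    	// Default kubelet plugin-registration directory; CSI node plugins
    	// drop a registration socket here for kubelet to discover.
    	const dir = "/var/lib/kubelet/plugins_registry"
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Println("cannot read plugin registry:", err)
    		return
    	}
    	for _, e := range entries {
    		fmt.Println("registered plugin socket:", e.Name())
    	}
    }

Until a socket for kubevirt.io.hostpath-provisioner shows up there, every retry of this unmount will fail with the same "not found in the list of registered CSI drivers" error.
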
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690607 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.690614 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690642 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690739 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690734 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690818 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:34.690798898 +0000 UTC m=+85.575224288 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690657 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690860 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690875 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:34.69084072 +0000 UTC m=+85.575265860 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690882 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690983 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:34.690965553 +0000 UTC m=+85.575390683 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.690694 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.691069 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:34.691051025 +0000 UTC m=+85.575476445 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.757019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.757542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.757610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.757653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.757679 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:02Z","lastTransitionTime":"2025-09-29T19:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.766227 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.766295 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.766423 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.766702 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.766740 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.766907 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
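
The kube-api-access-* mounts failing above are projected volumes: a service-account token, the kube-root-ca.crt configmap, and the namespace are all projected into one directory, and until kubelet's object cache has those configmaps registered for the pod, SetUp fails as logged. A dependency-free stand-in for the source list follows; the real types are in k8s.io/api/core/v1, and the openshift-service-ca.crt entry and its file path are inferred from the errors above rather than confirmed:

    package main

    import "fmt"

    // Source is a stand-in for one projected-volume source; the
    // canonical definitions live in k8s.io/api/core/v1.
    type Source struct {
    	Kind string // "serviceAccountToken", "configMap", "downwardAPI"
    	Name string // referenced object, where applicable
    	Path string // file it lands on inside the mount
    }

    func main() {
    	sources := []Source{
    		{Kind: "serviceAccountToken", Path: "token"},
    		{Kind: "configMap", Name: "kube-root-ca.crt", Path: "ca.crt"},
    		// OpenShift addition visible in the log; path is an assumption.
    		{Kind: "configMap", Name: "openshift-service-ca.crt", Path: "service-ca.crt"},
    		{Kind: "downwardAPI", Path: "namespace"},
    	}
    	for _, s := range sources {
    		fmt.Printf("%-20s name=%-26q -> %s\n", s.Kind, s.Name, s.Path)
    	}
    }

Because the projection needs every source, one unregistered configmap is enough to fail the whole volume, which is why both kube-root-ca.crt and openshift-service-ca.crt appear in each error.
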
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.767047 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:02 crc kubenswrapper[4779]: E0929 19:09:02.767770 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.862606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.862657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.862672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.862691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.862704 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:02Z","lastTransitionTime":"2025-09-29T19:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.976105 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.976166 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.976175 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.976190 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:02 crc kubenswrapper[4779]: I0929 19:09:02.976202 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:02Z","lastTransitionTime":"2025-09-29T19:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.079061 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.079099 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.079107 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.079121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.079130 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.107365 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/1.log"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.110350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433"}
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.110968 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg"
Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.127501 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.142197 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.156114 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.172429 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.181994 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.182053 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.182067 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.182089 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.182103 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.197099 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"
initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\
\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.217799 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.237301 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.258622 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] 
[]},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.271481 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.281058 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.285477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.285530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.285543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.285565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.285580 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.292633 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.302572 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.312759 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.324681 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.338243 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.350987 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:03Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.388188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.388239 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 
19:09:03.388250 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.388272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.388283 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.490869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.490906 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.490914 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.490926 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.490935 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.593800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.594299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.594329 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.594352 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.594745 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.697533 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.697603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.697623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.697649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.697666 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.800032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.800078 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.800094 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.800118 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.800136 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.903391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.903430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.903440 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.903456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:03 crc kubenswrapper[4779]: I0929 19:09:03.903467 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:03Z","lastTransitionTime":"2025-09-29T19:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.006109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.006171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.006188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.006211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.006233 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.108764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.108836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.108853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.109306 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.109420 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
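The repeated "Node became not ready" conditions above all carry the same KubeletNotReady message: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/ and therefore reports NetworkReady=false until the network provider (here OVN-Kubernetes, deployed via multus) writes its config into that directory. A small Go sketch of that readiness check follows, under stated assumptions: the directory path comes from the log message, and the accepted extensions (.conf, .conflist, .json) are the conventional libcni ones, not verified against this kubelet build.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // The kubelet message above means this directory holds no usable CNI
        // network config yet; libcni conventionally accepts these extensions.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("read dir:", err)
            return
        }
        found := 0
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("config:", e.Name())
                found++
            }
        }
        if found == 0 {
            fmt.Printf("no CNI configuration file in %s - NetworkReady stays false\n", dir)
        }
    }

On this node the check keeps failing because ovnkube-controller, which would produce that config, is itself crash-looping, as the next entries show.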
Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.117568 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/2.log" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.118233 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/1.log" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.120482 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" exitCode=1 Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.120532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.120569 4779 scope.go:117] "RemoveContainer" containerID="418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.121397 4779 scope.go:117] "RemoveContainer" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" Sep 29 19:09:04 crc kubenswrapper[4779]: E0929 19:09:04.121555 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.146223 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
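The CrashLoopBackOff entry above shows the kubelet deferring the next ovnkube-controller restart with "back-off 20s" after the container exited with code 1 again. A sketch of the delay schedule implied by that line follows, assuming the kubelet's default behavior of a 10-second initial delay doubled after each failed restart and capped at 5 minutes; the specific constants are an assumption about this build, not taken from the log.

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // CrashLoopBackOff delays, assuming kubelet defaults: 10s initial,
        // doubled per failed restart, capped at 5 minutes.
        delay := 10 * time.Second
        const maxBackoff = 5 * time.Minute
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("after crash %d: back-off %s\n", restart, delay)
            delay *= 2
            if delay > maxBackoff {
                delay = maxBackoff
            }
        }
    }

Under those assumptions the schedule runs 10s, 20s, 40s, 1m20s, ... up to 5m; the "back-off 20s" in the log is consistent with the container's second consecutive crash.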
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.165724 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.179491 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.196490 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.210621 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.212886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.212933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.212945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.212961 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.212973 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.225878 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.238732 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.264204 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://418a68d2ab745fecfe78084a541301c6af31b6ece2ccb58d61be58b2989235ba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:08:41Z\\\",\\\"message\\\":\\\"rr: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:08:41Z is after 2025-08-24T17:21:41Z]\\\\nI0929 19:08:41.905972 6175 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 19:08:41.905727 6175 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:Servic\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 
1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.278477 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.288790 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.298111 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.306713 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.315549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.315599 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.315625 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.315644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.315657 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.318181 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.330800 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.341010 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.351083 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:04Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.418100 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.418396 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 
19:09:04.418535 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.418858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.419009 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.523611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.523894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.524003 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.524154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.524262 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.627707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.627754 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.627771 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.627792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.627810 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.731633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.732609 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.732911 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.733075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.733227 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.765624 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.765709 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.765719 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.765642 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:04 crc kubenswrapper[4779]: E0929 19:09:04.765823 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:04 crc kubenswrapper[4779]: E0929 19:09:04.766011 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:04 crc kubenswrapper[4779]: E0929 19:09:04.766178 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:04 crc kubenswrapper[4779]: E0929 19:09:04.766462 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.836554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.836631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.836655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.836691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.836716 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.939404 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.939479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.939502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.939532 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:04 crc kubenswrapper[4779]: I0929 19:09:04.939556 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:04Z","lastTransitionTime":"2025-09-29T19:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.043349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.043409 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.043425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.043448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.043465 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.127303 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/2.log" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.132499 4779 scope.go:117] "RemoveContainer" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" Sep 29 19:09:05 crc kubenswrapper[4779]: E0929 19:09:05.132745 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.146622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.146684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.146703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.146726 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.146743 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.152543 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.169597 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.185179 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.206931 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.228620 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.243664 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.249534 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.249613 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.249635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.249667 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.249687 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.266128 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.289372 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.309842 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.327456 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.353023 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.353081 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.353095 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.353117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.353138 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.359002 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.373012 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.391086 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete 
has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.403676 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.416725 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.430595 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:05Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.456092 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.456152 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.456166 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.456188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.456203 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.559347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.559393 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.559405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.559427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.559441 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.662426 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.662469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.662483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.662500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.662512 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.765199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.765269 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.765286 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.765312 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.765373 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.868842 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.868919 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.868933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.868951 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.868983 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.972385 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.972454 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.972465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.972483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:05 crc kubenswrapper[4779]: I0929 19:09:05.972495 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:05Z","lastTransitionTime":"2025-09-29T19:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.074934 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.075016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.075040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.075073 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.075094 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.177736 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.177797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.177817 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.177840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.177856 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.281499 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.281553 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.281570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.281591 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.281605 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.383477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.383526 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.383536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.383549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.383557 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.486557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.486637 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.486661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.486695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.486716 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.545036 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.557685 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.564698 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.588317 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.589951 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.590012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.590038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.590069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.590093 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.609811 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.630646 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.631132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.631202 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.631226 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.631260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.631285 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.644409 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.647550 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
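Each "failed to patch status" entry above carries a strategic merge patch as its payload: the "$setElementOrder/conditions" directive pins the order of the conditions list (which the API server merges by its "type" key), while "conditions" itself lists only the changed entries. A minimal Go sketch of building that payload shape, using the Ready condition values from these entries (illustrative only, not kubelet's own code):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Shape of the node-status patch seen in the log: an order directive
	// plus only the changed condition entries, merged server-side by "type".
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{{
				"type":               "Ready",
				"status":             "False",
				"reason":             "KubeletNotReady",
				"lastHeartbeatTime":  "2025-09-29T19:09:06Z",
				"lastTransitionTime": "2025-09-29T19:09:06Z",
			}},
		},
	}
	body, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	// Prints the unescaped equivalent of the patch bodies quoted in the log.
	fmt.Println(string(body))
}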
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.648855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.648922 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.648948 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.648983 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.649006 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.661938 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.669749 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.673229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.673284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.673300 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.673323 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.673337 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.679377 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.687187 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.690775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.690813 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.690824 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.690840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.690851 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.693564 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.704073 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
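The allocatable figures in these node patches are capacity minus reservations: 11800m of 12 CPUs and 32404560Ki of 32865360Ki memory remain for pods. Working that difference out, with values copied from the patch (the split between system and kube reservations is not visible in this log):

package main

import "fmt"

func main() {
	// Values copied from the node status patch above.
	capacityCPUm, allocatableCPUm := 12000, 11800         // millicores
	capacityMemKi, allocatableMemKi := 32865360, 32404560 // KiB

	fmt.Printf("reserved CPU: %dm\n", capacityCPUm-allocatableCPUm) // 200m
	reservedKi := capacityMemKi - allocatableMemKi
	fmt.Printf("reserved memory: %dKi (%dMiB)\n", reservedKi, reservedKi/1024) // 460800Ki, 450MiB
}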
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.707467 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.707497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.707508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.707524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.707537 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.721663 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.721923 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724637 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724819 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724899 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.724737 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.735758 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.748379 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.762817 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.765612 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.766139 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.765616 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.766405 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.765610 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.766632 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.765661 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:06 crc kubenswrapper[4779]: E0929 19:09:06.766844 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.778549 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.792854 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.806073 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.818807 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:06Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.827700 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.827900 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.827997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.828128 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.828221 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.931465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.931526 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.931542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.931565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:06 crc kubenswrapper[4779]: I0929 19:09:06.931583 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:06Z","lastTransitionTime":"2025-09-29T19:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.035633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.035681 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.035690 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.035705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.035714 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.138154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.138181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.138189 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.138200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.138209 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.241398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.241494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.241514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.241542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.241558 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.344590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.344634 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.344644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.344660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.344672 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.447639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.447701 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.447718 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.447742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.447759 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.550421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.550462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.550470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.550506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.550517 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.653630 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.653692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.653709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.653734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.653752 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.757392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.757461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.757500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.757530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.757548 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.860530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.860577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.860592 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.860620 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.860636 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.963797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.963859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.963878 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.963903 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:07 crc kubenswrapper[4779]: I0929 19:09:07.963921 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:07Z","lastTransitionTime":"2025-09-29T19:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.066682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.066758 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.066774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.066802 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.066818 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.169696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.169742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.169754 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.169770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.169782 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.272651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.272693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.272729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.272760 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.272772 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.375429 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.375489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.375541 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.375564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.375579 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.479156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.479210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.479227 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.479252 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.479270 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.581734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.581795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.581812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.581835 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.581852 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.684902 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.684976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.684989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.685012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.685048 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.765237 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.765285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.765310 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.765285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:08 crc kubenswrapper[4779]: E0929 19:09:08.765411 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:09:08 crc kubenswrapper[4779]: E0929 19:09:08.765562 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:09:08 crc kubenswrapper[4779]: E0929 19:09:08.765639 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:08 crc kubenswrapper[4779]: E0929 19:09:08.765699 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.788114 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.788530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.788721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.788934 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.789128 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.891986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.892054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.892072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.892107 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.892123 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.995566 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.995623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.995640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.995662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:08 crc kubenswrapper[4779]: I0929 19:09:08.995678 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:08Z","lastTransitionTime":"2025-09-29T19:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.099618 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.099664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.099673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.099688 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.099698 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.203214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.203271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.203286 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.203305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.203444 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.307435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.307521 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.307538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.307570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.307623 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.410466 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.410514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.410530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.410552 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.410570 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.514083 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.514149 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.514165 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.514185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.514200 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.617737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.618277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.618479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.618642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.618767 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.721587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.721647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.721664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.721687 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.721705 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.786317 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.804511 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.817742 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.824651 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.824700 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.824716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.824780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.824800 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.829449 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.843629 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.859717 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.870755 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.884790 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.900730 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"ex
itCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.911204 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.922261 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.927301 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.927371 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.927388 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.927406 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.927415 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:09Z","lastTransitionTime":"2025-09-29T19:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.934694 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.952373 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.963348 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.973414 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.985743 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:09 crc kubenswrapper[4779]: I0929 19:09:09.999355 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:09Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.030144 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.030187 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.030195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.030210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.030220 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.132872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.132929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.132942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.132958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.132971 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.236138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.236187 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.236199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.236218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.236232 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.339510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.339559 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.339569 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.339584 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.339596 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.441366 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.441437 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.441453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.441479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.441496 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.544804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.544857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.544875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.544896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.544913 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.648275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.648419 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.648433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.648451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.648463 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.751317 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.751372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.751380 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.751394 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.751404 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.765620 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.765653 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.765656 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.765724 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:10 crc kubenswrapper[4779]: E0929 19:09:10.765865 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:10 crc kubenswrapper[4779]: E0929 19:09:10.765953 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:10 crc kubenswrapper[4779]: E0929 19:09:10.766021 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:10 crc kubenswrapper[4779]: E0929 19:09:10.766157 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.854646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.854705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.854714 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.854727 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.854736 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.957983 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.958055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.958077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.958106 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:10 crc kubenswrapper[4779]: I0929 19:09:10.958129 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:10Z","lastTransitionTime":"2025-09-29T19:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.060610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.060702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.060726 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.060754 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.060770 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.163494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.163556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.163579 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.163611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.163635 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.266115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.266179 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.266197 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.266223 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.266241 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.368545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.368587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.368597 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.368612 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.368622 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.471640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.471732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.471915 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.471935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.471945 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.574167 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.574241 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.574264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.574295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.574322 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.677717 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.677782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.677799 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.677822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.677839 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.781087 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.781160 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.781182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.781209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.781230 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.884115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.884181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.884198 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.884224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.884241 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.987042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.987086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.987098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.987114 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:11 crc kubenswrapper[4779]: I0929 19:09:11.987126 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:11Z","lastTransitionTime":"2025-09-29T19:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.089923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.089989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.090010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.090034 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.090054 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.193577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.193627 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.193638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.193660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.193672 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.296420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.296452 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.296461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.296473 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.296482 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.399655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.399731 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.399755 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.399786 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.399807 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.502303 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.502400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.502420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.502445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.502464 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.605468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.605526 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.605545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.605568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.605584 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.615653 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.633677 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.647075 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.661372 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.674523 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.685741 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.698630 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.707955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.707996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.708007 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.708024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.708035 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.712625 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:
08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.725730 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.738460 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.753478 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.761495 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.765358 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.765383 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.765383 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.765461 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:12 crc kubenswrapper[4779]: E0929 19:09:12.765576 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:12 crc kubenswrapper[4779]: E0929 19:09:12.765676 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:12 crc kubenswrapper[4779]: E0929 19:09:12.765800 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:12 crc kubenswrapper[4779]: E0929 19:09:12.765970 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.771053 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.781613 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"
cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 
19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 
19:09:12.792110 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.800356 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.809810 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.809898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.809915 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.809964 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.809978 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.811439 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.822912 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:12Z is after 2025-08-24T17:21:41Z" Sep 29 
Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.912001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.912071 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.912091 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.912116 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:12 crc kubenswrapper[4779]: I0929 19:09:12.912134 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:12Z","lastTransitionTime":"2025-09-29T19:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the preceding five-record status block (NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady / "Node became not ready") repeats 18 more times at ~100 ms intervals, I0929 19:09:13.014428 through I0929 19:09:14.761913, with only the timestamps changing ...]
Sep 29 19:09:14 crc kubenswrapper[4779]: I0929 19:09:14.765183 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:14 crc kubenswrapper[4779]: I0929 19:09:14.765241 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:14 crc kubenswrapper[4779]: I0929 19:09:14.765267 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:14 crc kubenswrapper[4779]: I0929 19:09:14.765449 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:14 crc kubenswrapper[4779]: E0929 19:09:14.765441 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:14 crc kubenswrapper[4779]: E0929 19:09:14.765627 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:14 crc kubenswrapper[4779]: E0929 19:09:14.765785 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:14 crc kubenswrapper[4779]: E0929 19:09:14.765935 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... the same five-record status block repeats 16 times, I0929 19:09:14.864247 through I0929 19:09:16.431928, with only the timestamps changing ...]
Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.534502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.534565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.534575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.534590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.534601 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:16Z","lastTransitionTime":"2025-09-29T19:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.544896 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.545004 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.545047 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. No retries permitted until 2025-09-29 19:09:48.545035048 +0000 UTC m=+99.429460148 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.637261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.637300 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.637331 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.637357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.637369 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:16Z","lastTransitionTime":"2025-09-29T19:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
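Both failure paths in this stretch are paced by doubling backoff rather than tight retry loops: the volume mount above is parked for 32s ("No retries permitted until ... durationBeforeRetry 32s"), and the container restart a few records below is parked for 20s ("back-off 20s ... CrashLoopBackOff"). A minimal sketch (not part of the log) of that doubling-with-cap pattern; the 0.5s/10s bases and the caps are illustrative assumptions consistent with the delays seen here, not values read out of this kubelet:

    def backoff_schedule(base_s: float, cap_s: float, failures: int) -> list[float]:
        """Delay applied after each consecutive failure: base * 2**n, clamped at cap."""
        return [min(base_s * 2.0 ** n, cap_s) for n in range(failures)]

    # Volume mount retries: a 0.5 s base doubled per failure reaches the 32 s
    # seen above on the 7th consecutive failure (0.5, 1, 2, 4, 8, 16, 32).
    print(backoff_schedule(0.5, 122.0, 7))

    # Container restarts: a 10 s base doubled once matches the "back-off 20s
    # restarting failed container" CrashLoopBackOff record below.
    print(backoff_schedule(10.0, 300.0, 4))

The practical consequence is visible in the timestamps: once an operation fails repeatedly, its error reappears at widening intervals instead of every sync tick.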
Has your network provider started?"} Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.740056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.740096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.740106 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.740118 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.740129 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:16Z","lastTransitionTime":"2025-09-29T19:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.765986 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.765998 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.766194 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.766222 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.766303 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.766709 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.766906 4779 scope.go:117] "RemoveContainer" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.767024 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.767031 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:16 crc kubenswrapper[4779]: E0929 19:09:16.766906 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.842935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.842973 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.842982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.843000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.843014 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:16Z","lastTransitionTime":"2025-09-29T19:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.946013 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.946062 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.946080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.946105 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:16 crc kubenswrapper[4779]: I0929 19:09:16.946122 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:16Z","lastTransitionTime":"2025-09-29T19:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.048554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.048609 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.048620 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.048636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.048647 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.118698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.118744 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.118755 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.118769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.118780 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: E0929 19:09:17.131512 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:17Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.135433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.135500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.135511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.135536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.135548 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: E0929 19:09:17.146981 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.189218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.189242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.189259 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: E0929 19:09:17.203181 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:17Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:17 crc kubenswrapper[4779]: E0929 19:09:17.203289 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.204568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.204623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.204658 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.204678 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.204690 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.307654 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.307690 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.307701 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.307719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.307736 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.409847 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.409896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.409909 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.409926 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.409938 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.513011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.513088 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.513111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.513146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.513170 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.616287 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.616334 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.616342 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.616355 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.616363 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.719660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.719738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.719762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.719792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.719815 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.822673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.822709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.822720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.822735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.822774 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.924900 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.924945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.924960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.924974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:17 crc kubenswrapper[4779]: I0929 19:09:17.924983 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:17Z","lastTransitionTime":"2025-09-29T19:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.028763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.028808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.028844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.028862 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.028876 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.131685 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.131742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.131759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.131783 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.131802 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.235116 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.235168 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.235184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.235208 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.235224 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.337191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.337228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.337239 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.337255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.337263 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.439969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.440009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.440018 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.440032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.440040 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.543442 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.543636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.543722 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.543782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.543925 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.647263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.647343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.647363 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.647387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.647404 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.749931 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.750053 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.750062 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.750077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.750094 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.765551 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.765570 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.765578 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.765558 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:18 crc kubenswrapper[4779]: E0929 19:09:18.765654 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:18 crc kubenswrapper[4779]: E0929 19:09:18.765730 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:18 crc kubenswrapper[4779]: E0929 19:09:18.765797 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:18 crc kubenswrapper[4779]: E0929 19:09:18.765904 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.853028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.853075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.853085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.853099 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.853110 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.954940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.955001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.955014 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.955032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:18 crc kubenswrapper[4779]: I0929 19:09:18.955045 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:18Z","lastTransitionTime":"2025-09-29T19:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.058196 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.058261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.058271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.058291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.058302 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.161072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.161134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.161151 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.161172 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.161185 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.180009 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/0.log" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.180071 4779 generic.go:334] "Generic (PLEG): container finished" podID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" containerID="2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca" exitCode=1 Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.180110 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerDied","Data":"2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.180607 4779 scope.go:117] "RemoveContainer" containerID="2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.197451 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.215136 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.228470 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.239293 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.252843 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.271277 4779 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.277831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.277862 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.277871 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.277886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.277896 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.301445 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.320561 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.338815 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.353397 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec
2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.365337 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.379912 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.380158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.380172 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.380181 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.380195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.380202 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.403023 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
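The ovnkube-controller container shown here is on its second restart and being held in a 20 s back-off, which lines up with kubelet's crash-loop policy: a 10 s base delay that doubles per restart and caps at 5 minutes. Those constants are kubelet defaults stated from general knowledge, not values read from this log; a sketch of the resulting schedule:

```go
package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the back-off applied before restart number n (n >= 1):
// 10s base, doubling each time, capped at 5 minutes. These constants are
// assumed kubelet defaults, not values taken from this log.
func crashLoopDelay(n int) time.Duration {
	delay := 10 * time.Second
	for i := 1; i < n; i++ {
		delay *= 2
		if delay >= 5*time.Minute {
			return 5 * time.Minute
		}
	}
	return delay
}

func main() {
	// crashLoopDelay(2) = 20s, matching the "back-off 20s" at restartCount 2 above.
	for n := 1; n <= 6; n++ {
		fmt.Printf("before restart %d: wait %s\n", n, crashLoopDelay(n))
	}
}
```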
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.412826 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.426902 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440
c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.444844 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.460143 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.483238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.483301 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.483358 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.483390 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.483413 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.585840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.585913 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.585929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.585955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.585972 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.689225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.689278 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.689288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.689306 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.689373 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.784800 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.793188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.793256 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.793279 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.793445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.793472 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.803066 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.815740 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.838168 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.853759 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.867608 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.879109 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.890977 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.895879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.895921 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.895937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.895956 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.895967 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.904990 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.924072 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.939842 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.953772 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.969101 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.983993 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.997457 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:19Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.998928 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.999067 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.999171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.999266 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:19 crc kubenswrapper[4779]: I0929 19:09:19.999392 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:19Z","lastTransitionTime":"2025-09-29T19:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.010096 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z 
[verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.025271 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.101400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.101445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:20 crc 
kubenswrapper[4779]: I0929 19:09:20.101455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.101471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.101482 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.185308 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/0.log" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.185389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerStarted","Data":"0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.199775 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.203612 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.203642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.203651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.203671 4779 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.203681 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.218532 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.232347 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.252512 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.267448 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.279742 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.293944 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.306923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.306985 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 
19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.307003 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.307028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.307044 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.309278 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.327828 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.340085 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.351092 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.369418 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.383469 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.398267 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.410243 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.410303 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.410335 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.410357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.410372 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.414000 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.428835 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.440568 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:20Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.513569 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.513616 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.513629 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.513651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.513666 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.615717 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.615791 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.615815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.615844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.615867 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.718210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.718253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.718264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.718284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.718299 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.766244 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.766272 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.766414 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:20 crc kubenswrapper[4779]: E0929 19:09:20.766475 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.766270 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:20 crc kubenswrapper[4779]: E0929 19:09:20.766418 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:09:20 crc kubenswrapper[4779]: E0929 19:09:20.766630 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
Sep 29 19:09:20 crc kubenswrapper[4779]: E0929 19:09:20.766764 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.821020 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.821079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.821091 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.821109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.821121 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.923912 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.923974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.923996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.924026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:20 crc kubenswrapper[4779]: I0929 19:09:20.924050 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:20Z","lastTransitionTime":"2025-09-29T19:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... 16 identical NodeHasSufficientMemory/NodeHasNoDiskPressure/NodeHasSufficientPID/NodeNotReady/"Node became not ready" event blocks repeated at ~100 ms intervals, I0929 19:09:21.027622 through I0929 19:09:22.572282 ...]
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.674105 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.674140 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.674148 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.674161 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.674170 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:22Z","lastTransitionTime":"2025-09-29T19:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.765608 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.765666 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.765632 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.765608 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:09:22 crc kubenswrapper[4779]: E0929 19:09:22.765823 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:09:22 crc kubenswrapper[4779]: E0929 19:09:22.765938 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
Sep 29 19:09:22 crc kubenswrapper[4779]: E0929 19:09:22.766045 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:09:22 crc kubenswrapper[4779]: E0929 19:09:22.766130 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.776449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.776476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.776484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.776497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.776507 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:22Z","lastTransitionTime":"2025-09-29T19:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.879392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.879461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.879485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.879516 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.879540 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:22Z","lastTransitionTime":"2025-09-29T19:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.982497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.982537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.982546 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.982564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:22 crc kubenswrapper[4779]: I0929 19:09:22.982573 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:22Z","lastTransitionTime":"2025-09-29T19:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... 16 identical NodeHasSufficientMemory/NodeHasNoDiskPressure/NodeHasSufficientPID/NodeNotReady/"Node became not ready" event blocks repeated at ~100 ms intervals, I0929 19:09:23.085522 through I0929 19:09:24.628861 ...]
Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.732246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.732305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.732354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.732378 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.732394 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:24Z","lastTransitionTime":"2025-09-29T19:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.765083 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.765166 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.765179 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:24 crc kubenswrapper[4779]: E0929 19:09:24.765303 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.765375 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:24 crc kubenswrapper[4779]: E0929 19:09:24.765521 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:24 crc kubenswrapper[4779]: E0929 19:09:24.765662 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:24 crc kubenswrapper[4779]: E0929 19:09:24.765731 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.834567 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.834625 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.834649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.834669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.834683 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:24Z","lastTransitionTime":"2025-09-29T19:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.937370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.937421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.937433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.937451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:24 crc kubenswrapper[4779]: I0929 19:09:24.937463 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:24Z","lastTransitionTime":"2025-09-29T19:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.039462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.039525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.039542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.039564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.039583 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.142936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.142989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.142998 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.143012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.143020 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.246067 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.246143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.246161 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.246185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.246204 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.348819 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.348866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.348883 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.348908 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.348924 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.452377 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.452446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.452478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.452506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.452527 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.555491 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.555548 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.555572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.555599 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.555617 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.658415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.658470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.658488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.658946 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.658996 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.764392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.764462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.764480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.764503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.764520 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.867402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.867433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.867441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.867460 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.867475 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.969936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.970018 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.970031 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.970068 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:25 crc kubenswrapper[4779]: I0929 19:09:25.970081 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:25Z","lastTransitionTime":"2025-09-29T19:09:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.072877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.073163 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.073181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.073215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.073230 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.175981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.176041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.176056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.176076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.176088 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.279035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.279115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.279132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.279158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.279176 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.382232 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.382294 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.382310 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.382366 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.382382 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.485145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.485425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.485510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.485593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.485666 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.588526 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.589162 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.589182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.589201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.589214 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.691890 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.691944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.691956 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.691969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.691978 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.766059 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.766064 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.766084 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.766211 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:26 crc kubenswrapper[4779]: E0929 19:09:26.766352 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:26 crc kubenswrapper[4779]: E0929 19:09:26.766501 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:26 crc kubenswrapper[4779]: E0929 19:09:26.766547 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:26 crc kubenswrapper[4779]: E0929 19:09:26.766615 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.794478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.794518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.794529 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.794547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.794729 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.896860 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.896916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.896939 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.896959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:26 crc kubenswrapper[4779]: I0929 19:09:26.896975 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:26Z","lastTransitionTime":"2025-09-29T19:09:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.000379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.000525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.000545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.000571 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.000588 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.103708 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.103766 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.103784 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.103807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.103827 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.207144 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.207200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.207217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.207242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.207260 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.310673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.310749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.310774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.310804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.310827 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.413723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.413787 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.413804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.413829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.413845 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.436572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.436662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.436689 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.436724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.436747 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.457423 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:27Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.461933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.461999 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.462023 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.462051 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.462074 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.480753 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:27Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.487047 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.487090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.487107 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.487132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.487150 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.507818 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:27Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.513367 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.513433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.513457 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.513489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.513515 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.527279 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:27Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.531949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.532006 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.532024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.532048 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.532065 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.547150 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:27Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:27 crc kubenswrapper[4779]: E0929 19:09:27.547456 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.549151 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.549246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.549271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.549299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.549393 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.652188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.652253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.652270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.652294 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.652312 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.755939 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.756017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.756040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.756073 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.756097 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.858954 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.859024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.859041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.859069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.859090 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.962549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.962594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.962602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.962615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:27 crc kubenswrapper[4779]: I0929 19:09:27.962624 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:27Z","lastTransitionTime":"2025-09-29T19:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.065624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.065674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.065690 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.065713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.065730 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.168707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.168772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.168813 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.168844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.168873 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.272872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.272967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.272987 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.273011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.273031 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.376501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.376556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.376573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.376596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.376616 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.479523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.479603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.479626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.479658 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.479684 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.583649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.583746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.583772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.583800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.583827 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.687143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.687183 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.687195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.687210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.687221 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.765845 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.765881 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.765961 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.765958 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:28 crc kubenswrapper[4779]: E0929 19:09:28.766858 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:28 crc kubenswrapper[4779]: E0929 19:09:28.767028 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:28 crc kubenswrapper[4779]: E0929 19:09:28.767162 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:28 crc kubenswrapper[4779]: E0929 19:09:28.767262 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.790229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.790305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.790362 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.790464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.790493 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.893246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.893309 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.893344 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.893365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.893378 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.997135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.997221 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.997241 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.997264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:28 crc kubenswrapper[4779]: I0929 19:09:28.997287 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:28Z","lastTransitionTime":"2025-09-29T19:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.100992 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.101063 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.101080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.101105 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.101122 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.203917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.204387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.204587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.204770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.204932 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.307741 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.307783 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.307795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.307811 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.307823 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.411336 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.411404 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.411420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.411448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.411462 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.514632 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.515424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.515501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.515529 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.515548 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.618132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.618469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.618544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.618640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.618728 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.721816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.721875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.721891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.721913 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.721931 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.766957 4779 scope.go:117] "RemoveContainer" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.779075 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.782527 4779 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.794694 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.809628 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.822122 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.824802 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.824832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.824844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.824858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.824868 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.840293 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.863642 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.876547 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.887906 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.899145 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.912599 4779 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.926632 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.927674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.927702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.927710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.927726 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.927734 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:29Z","lastTransitionTime":"2025-09-29T19:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.943604 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.959112 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.976186 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2
c9b3beedbb57daee3d909433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.985908 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:29 crc kubenswrapper[4779]: I0929 19:09:29.996704 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:29Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.007411 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.030156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.030199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.030210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.030224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.030235 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.132879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.132933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.132948 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.132980 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.132995 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.229581 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/2.log" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.233259 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.233860 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.234798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.234836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.234876 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.234894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.234907 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.250341 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.263396 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.278145 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.294964 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.317558 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.329234 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.336710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.336739 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.336747 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.336759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.336769 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.342106 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.352433 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.367673 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e04b0f18f7180f57e083d45291ba413fcda4de8
f92aa53189b296e6bc5c3905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 
},NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.376942 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.387796 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.399685 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.409573 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.432895 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ccd862a-dc6a-4229-a5b4-c1a1fcb5a0a4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4598380e2d0b9f8258cb148abf8cfaa7a880d8aadc09c0d3416ab9ea976db3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://968e5da40da49dffb34c44e42d5a955975ab0fca1d9987312ee20b6c2ab42a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6423a6fa1e69da68b47af0b59980013d6bcc23b51d0b25bb0085fab03dc3e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5ea8a87c2318ebb245b796b274bda0d959d4666414d5b1bd6b03ffc309e141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92369b55bd00c0b1116fcb69584f4ba6d73f00511343902f243682db2cc6ff8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.439197 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.439247 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.439260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.439278 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.439290 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.447924 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.461264 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.471778 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.481396 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:30Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.541443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.541498 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 
19:09:30.541515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.541533 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.541545 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.644770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.644813 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.644825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.644843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.644854 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.748434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.748816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.748960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.749116 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.749252 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.765573 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.765669 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:30 crc kubenswrapper[4779]: E0929 19:09:30.765737 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.765684 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:30 crc kubenswrapper[4779]: E0929 19:09:30.765859 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:30 crc kubenswrapper[4779]: E0929 19:09:30.765954 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.766551 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:30 crc kubenswrapper[4779]: E0929 19:09:30.766721 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.853780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.853843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.853857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.853877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.853891 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.956571 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.956805 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.956815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.956829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:30 crc kubenswrapper[4779]: I0929 19:09:30.956838 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:30Z","lastTransitionTime":"2025-09-29T19:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.059742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.059808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.059827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.059849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.059865 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.161867 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.161905 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.161917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.161930 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.161940 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.238208 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/3.log" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.239023 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/2.log" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.243515 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" exitCode=1 Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.243599 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.243670 4779 scope.go:117] "RemoveContainer" containerID="23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.244370 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:09:31 crc kubenswrapper[4779]: E0929 19:09:31.244558 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.263148 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.264021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.264055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.264070 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.264091 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.264105 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.279800 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.294052 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.310901 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.334434 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ccd862a-dc6a-4229-a5b4-c1a1fcb5a0a4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4598380e2d0b9f8258cb148abf8cfaa7a880d8aadc09c0d3416ab9ea976db3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://968e5da40da49dffb34c44e42d5a955975ab0fca1d9987312ee20b6c2ab42a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6423a6fa1e69da68b47af0b59980013d6bcc23b51d0b25bb0085fab03dc3e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5ea8a87c2318ebb245b796b274bda0d959d46
66414d5b1bd6b03ffc309e141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92369b55bd00c0b1116fcb69584f4ba6d73f00511343902f243682db2cc6ff8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.348225 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.361947 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.365725 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.365763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.365775 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.365798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.365814 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.375357 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.393571 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.416301 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.435566 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.448349 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.467706 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.467762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.467789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.467807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.467818 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.475804 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23219c41b54182e6c1c57246636336210c37b9a2c9b3beedbb57daee3d909433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:03Z\\\",\\\"message\\\":\\\"r network=default : 881.553µs\\\\nI0929 19:09:03.467164 6425 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466800 6425 services_controller.go:356] Processing sync for service openshift-dns-operator/metrics for network=default\\\\nI0929 19:09:03.467178 6425 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.467185 6425 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI0929 19:09:03.467190 6425 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI0929 19:09:03.467195 6425 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0929 19:09:03.466832 6425 services_controller.go:434] Service default/kubernetes retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{kubernetes default 1fcaffea-cfe2-4295-9c2a-a3b3626fb3f1 259 0 2025-02-23 05:11:12 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[component:apiserver provider:kubernetes] map[] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 
},NodePort:0,AppProtocol:nil,},},Selector:m\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:30Z\\\",\\\"message\\\":\\\"ssionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0929 19:09:30.665638 6786 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-42vjg after 0 failed attempt(s)\\\\nI0929 19:09:30.665643 6786 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nF0929 19:09:30.665574 6786 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.netw\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f
36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.487930 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.500545 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35
a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.517616 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.530845 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.543606 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:
08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:31Z is after 2025-08-24T17:21:41Z"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.569614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.569655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.569667 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.569683 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.569694 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.672414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.672475 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.672489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.672512 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.672527 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.774374 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.774478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.774497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.774523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.774544 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.877629 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.877687 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.877709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.877737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.877758 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.980215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.980275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.980291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.980311 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:31 crc kubenswrapper[4779]: I0929 19:09:31.980374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:31Z","lastTransitionTime":"2025-09-29T19:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.083870 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.083944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.083966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.083990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.084007 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.187846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.187935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.187949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.187967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.187980 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.249457 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/3.log"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.254017 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"
Sep 29 19:09:32 crc kubenswrapper[4779]: E0929 19:09:32.254272 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2"
Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.269390 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35c2e46d-85f5-4dbc-b75c-ff3cab6cc941\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://341b30347851b6ce8681296d5622e32df85756f3a372075489023227f0b7828b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b9d027e1821e0de6cae982b60001ce62087f205bf5553d9d65c6cfe2d6e0c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://703b4d9d7a59ba642408a7920fd8c2753f1474f28acaa371e0a6b90c75bb7e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fcb83ea7913513031d2c874a33ec806cf823bf8e3838378db3d9c94ca9229a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290224 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f701cb-2e7d-4cea-8455-f68605964ac6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa32890cde2a0a47211239f797f83380b4418ad2b8fb1560c779705e68df39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a017f2621de03e0184a0a168819a6762e4b36a1ae5c66a6bcfd1d65266fcb52\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9691127cc0437ceebef7a82219adb83625690db4d6bf5cf1c8177a6a9e1284\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19af6b603d6261471bf10fbf65244aa6842b15a02fbbdea09a8ab2d01ec1e1a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b14ae0b08f077b3acbe802e4a5ec06dd10075100c37d5587119a09a92b4651a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T19:08:29Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 19:08:29.408025 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 19:08:29.408423 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 19:08:29.411188 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1038548135/tls.crt::/tmp/serving-cert-1038548135/tls.key\\\\\\\"\\\\nI0929 19:08:29.848353 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 19:08:29.866865 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 19:08:29.866980 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 19:08:29.867032 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 19:08:29.867061 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 19:08:29.872666 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 19:08:29.872766 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872793 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 19:08:29.872817 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 19:08:29.872839 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 19:08:29.872861 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 19:08:29.872883 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 19:08:29.873077 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 19:08:29.875629 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0916d37ad679d190403229b40390af2860e24f777a07cd5ef0ac2f7d3fc3c41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a737775c31cf8dc37838d904ee00f1075bc2aa52bea63587806cb7486950eaf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290957 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.290971 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.303971 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.324861 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"046df2ef-fb75-4d32-93e6-17b36af0a7c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e04b0f18f7180f57e083d45291ba413fcda4de8
f92aa53189b296e6bc5c3905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:30Z\\\",\\\"message\\\":\\\"ssionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0929 19:09:30.665638 6786 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-42vjg after 0 failed attempt(s)\\\\nI0929 19:09:30.665643 6786 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nF0929 19:09:30.665574 6786 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netw\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:09:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2kg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-42vjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.338549 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zxpg4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0db9b28-7e3a-4d44-9e98-1a07c8e5b8d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f0f837dfb05912fb4f7b0cb08cdb363cb3bfe2cc91a0bebd1b2b2b8128d78cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6ghv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zxpg4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.352845 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-7hb2m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45b89d12-bbd2-4b47-815d-a7421cc1aa00\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://002d12663649198e6136f3e9df16a3b1cd8c64906bf39bde03cd0dde8c36abbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bvn4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-7hb2m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.369100 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.385376 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97c4781c-2d4b-4eab-96fb-39a342c2d4a0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2179d9cf26eb6d6fcc63666d3dd53e9a1a66cf633c9fe0b29e6d6ac726c8119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aeed56672d12f2355eda710f9021c6271debfaf1f7b7d6b1ff5f4ecc6764e2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7cxcv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fvdtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 
19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.393400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.393443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.393461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.393483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.393500 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.419218 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ccd862a-dc6a-4229-a5b4-c1a1fcb5a0a4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4598380e2d0b9f8258cb148abf8cfaa7a880d8aadc09c0d3416ab9ea976db3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://968e5da40da49dffb34c44e42d5a955975ab0fca1d9987312ee20b6c2ab42a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6423a6fa1e69da68b47af0b59980013d6bcc23b51d0b25bb0085fab03dc3e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f5ea8a87c2318ebb245b796b274bda0d959d4666414d5b1bd6b03ffc309e141\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92369b55bd00c0b1116fcb69584f4ba6d73f00511343902f243682db2cc6ff8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ee00d3478e07a7efaeda5a4986cccbb1438bee9630adef3feebc7be94f204\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://845c619c8e4a593f4f9339b4623d2d9ccb2d8f9ffe7fc763bf39c47e8028513b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c237a8c288e9ea7f9619f02915e0ca61bad4b3b5ffe9534bf2bc9fbda441302\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.433548 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.451220 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04d9dd8d319497dd3ed7c565217d8b9d228a1952a3cd0e8dee3cdad781c5770a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.462797 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"476bc421-1113-455e-bcc8-e207e47dad19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30844145987b3dae28d2ca3f75a36e21c27216b267ff40fdb83094b8045403c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tb7zk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d5cnr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.474849 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4df079c4-34e3-4132-91bb-ad68488552f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7sr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2rtwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.488238 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"49dd46af-a7fa-47b0-94ab-c9a999564fca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a67efc46aeb5956568b680b4041d83a55c0410ccefdb4e43b95503c6416e1dfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2253a3406f6de9463c1c615ad2ac76ce935067baa57b260f18afdb8d9e639e95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9b804f65dc9265342d6ec10b8cdd31d49f692289c7fd8292944e25a2c95ebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02428a90ed2dd1c36d800ffda10ed8759d92a87ec8f467cdf9fb11511b9b9420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.496693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.496734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.496745 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.496763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.496778 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.507793 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef5a50f788b42b32f4ec04eedc9f5c9dbcd07f64b430ec684d8a91b414e6c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb910a5c408a63f6c4391cf2fae320e14d6dc945125b53eefde3b666ef9d0754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.524615 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15b9c6a3d7c79b30673cd8040a9f3033e2405cf2e29b38d8ba2bee9cbb384794\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.544820 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jfbb6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ac24bbf-c37a-4253-be71-8d8f15cfd48e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T19:09:18Z\\\",\\\"message\\\":\\\"2025-09-29T19:08:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da\\\\n2025-09-29T19:08:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_88611c75-0338-4025-8d9a-c7a4eb6f87da to /host/opt/cni/bin/\\\\n2025-09-29T19:08:33Z [verbose] multus-daemon started\\\\n2025-09-29T19:08:33Z [verbose] Readiness Indicator file check\\\\n2025-09-29T19:09:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:09:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8ckjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jfbb6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.566627 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"54a33b8e-b623-4f91-be1d-a38dfcef17d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T19:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49599fb872eb458cb3fd77da63518f77653f671e82a8fc1db5f9e2b64e3c4d1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T19:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e697e7c54a1b8adfde2dfa86399bfd13d895196c12108ac625bd9b94ad913d1e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75932bd27c986408eb2a3ec9aefffa392bf1ede4be0ddeabb9fdcd5fcc56199d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e058271bf65bb6f216298a9fc58b97c76335275829be7e5bec59d92688e83a05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edec2f7ae27e72dcc2eac62281dc9b866cdc31ecbda5e24020043aad86a44784\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1750034ffcb61628b437e7b4f604be66ea6401d0ddb8a70f37fbb358c261670f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3409883fc5d9ed6b5e08d50c9d2821a4ac0b8932c9a0296c12065b0abb0354d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T19:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T19:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c77zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T19:08:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cvx8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:32Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.600136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.600193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc 
kubenswrapper[4779]: I0929 19:09:32.600210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.600236 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.600283 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.703752 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.703825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.703849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.703877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.703899 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.765511 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.765575 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.765597 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.765518 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:32 crc kubenswrapper[4779]: E0929 19:09:32.765674 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:32 crc kubenswrapper[4779]: E0929 19:09:32.765856 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:32 crc kubenswrapper[4779]: E0929 19:09:32.765988 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:32 crc kubenswrapper[4779]: E0929 19:09:32.766237 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.806399 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.806476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.806502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.806531 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.806550 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.908900 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.908932 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.908942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.908958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:32 crc kubenswrapper[4779]: I0929 19:09:32.908968 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:32Z","lastTransitionTime":"2025-09-29T19:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.012103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.012190 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.012205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.012229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.012247 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.114648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.114693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.114705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.114721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.114733 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.217736 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.217795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.217815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.217839 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.217855 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.319866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.319912 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.319923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.319942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.319956 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.422660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.422711 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.422742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.422760 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.422772 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.525150 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.525205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.525218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.525237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.525250 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.628664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.628744 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.628768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.628799 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.628821 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.731593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.731663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.731680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.731706 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.731723 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.834800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.834853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.834867 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.834884 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.834897 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.937774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.937847 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.937868 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.937894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:33 crc kubenswrapper[4779]: I0929 19:09:33.937916 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:33Z","lastTransitionTime":"2025-09-29T19:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.040368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.040430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.040472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.040501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.040519 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.143241 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.143301 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.143329 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.143354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.143365 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.246707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.247144 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.247158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.247178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.247191 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.351366 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.351407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.351418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.351435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.351446 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.454935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.455010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.455035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.455067 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.455089 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.557577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.557658 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.557676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.557698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.557715 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.661270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.661360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.661379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.661403 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.661420 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763457 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763623 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763666 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.763639416 +0000 UTC m=+149.648064526 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763706 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763737 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763755 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763768 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763820 4779 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.763801761 +0000 UTC m=+149.648226961 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763839 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763930 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763959 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.763975 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.764004 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.764028 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.764016096 +0000 UTC m=+149.648441206 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.763747 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.764071 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.764089 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.764121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.764076 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.764057217 +0000 UTC m=+149.648482357 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.764145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.764162 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 19:10:38.76414246 +0000 UTC m=+149.648567600 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.764165 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.765342 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.765408 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.765455 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.765468 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.765483 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.765541 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.765658 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:34 crc kubenswrapper[4779]: E0929 19:09:34.765778 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.866680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.866750 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.866762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.866779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.866792 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.969368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.969407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.969418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.969492 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:34 crc kubenswrapper[4779]: I0929 19:09:34.969504 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:34Z","lastTransitionTime":"2025-09-29T19:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.072076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.072109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.072119 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.072133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.072145 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.174946 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.175044 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.175066 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.175093 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.175113 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.277325 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.277367 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.277379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.277395 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.277407 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.381297 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.381353 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.381365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.381383 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.381395 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.483789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.483855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.483866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.483882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.483900 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.587629 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.587686 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.587702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.587724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.587741 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.691280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.691439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.691458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.691480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.691498 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.794653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.794719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.794745 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.794774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.794796 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.896681 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.896742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.896759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.896781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:35 crc kubenswrapper[4779]: I0929 19:09:35.896799 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:35Z","lastTransitionTime":"2025-09-29T19:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.000029 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.000103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.000125 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.000155 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.000175 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.103696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.103761 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.103783 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.103812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.103833 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.207237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.207297 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.207348 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.207373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.207389 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.310893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.310953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.310971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.310995 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.311012 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.413840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.413911 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.413934 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.413963 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.413987 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.516857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.516919 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.516942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.516971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.516992 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.620150 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.620204 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.620221 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.620274 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.620290 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.723292 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.723390 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.723423 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.723451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.723471 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.765618 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.765698 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.765633 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.765825 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:36 crc kubenswrapper[4779]: E0929 19:09:36.765786 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:36 crc kubenswrapper[4779]: E0929 19:09:36.765989 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:36 crc kubenswrapper[4779]: E0929 19:09:36.766103 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:36 crc kubenswrapper[4779]: E0929 19:09:36.766252 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.826955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.827001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.827008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.827022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.827032 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.930216 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.930276 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.930294 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.930356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:36 crc kubenswrapper[4779]: I0929 19:09:36.930374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:36Z","lastTransitionTime":"2025-09-29T19:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.032662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.032703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.032715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.032729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.032739 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.135094 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.135132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.135142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.135157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.135167 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.237582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.237655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.237666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.237709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.237724 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.340406 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.340488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.340501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.340518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.340530 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.443806 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.443863 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.443873 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.443889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.443901 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.546844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.546880 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.546891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.546905 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.546916 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.633150 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.633195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.633203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.633218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.633226 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.650201 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.655000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.655036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
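The patch above is rejected before it ever reaches etcd: the API server must consult the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the log's current time. A hedged Go sketch for confirming the expiry from the node itself; InsecureSkipVerify here only lets the handshake run far enough to read the peer certificate, a diagnostic assumption rather than a fix:

    package main

    import (
        "crypto/tls"
        "fmt"
    )

    func main() {
        // Dial the webhook endpoint quoted in the error; skip verification so
        // an expired certificate does not abort the handshake before we can
        // inspect its validity window.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
                cert.Subject, cert.NotBefore, cert.NotAfter)
        }
    }

This condition should clear once the cluster's certificate-rotation machinery reissues the webhook certificate; the repeated retries below are the kubelet waiting for exactly that.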
event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.655045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.655060 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.655069 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.672124 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.677437 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.677482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
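The kubelet retries a failed status update a fixed number of times per sync (nodeStatusUpdateRetry, 5 in the upstream kubelet) before logging a final failure, which is why the identical patch error repeats here with only the timestamps advancing. To see which conditions the API server last accepted, independent of these rejected patches, one might query the node directly; a client-go sketch, with the kubeconfig path an assumption for this CRC host:

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumed kubeconfig location; adjust for the environment at hand.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        for _, c := range node.Status.Conditions {
            fmt.Printf("%s=%s reason=%s heartbeat=%s\n",
                c.Type, c.Status, c.Reason, c.LastHeartbeatTime)
        }
    }

If the webhook certificate is still expired, the heartbeat timestamps returned here will predate the entries in this log, since none of the patches above have landed.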
event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.677496 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.677511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.677521 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.689909 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.698777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.698830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.698846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.698866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.698880 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.720064 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.723858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.723924 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.723943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.723968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.723986 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.740885 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T19:09:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c0f08dfd-4d0c-4b55-a30c-6725bfe13689\\\",\\\"systemUUID\\\":\\\"d61591a8-214b-4be1-8c58-e9ade5216b62\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T19:09:37Z is after 2025-08-24T17:21:41Z" Sep 29 19:09:37 crc kubenswrapper[4779]: E0929 19:09:37.741058 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.742915 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.742951 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.742962 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.742977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.742988 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.845524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.845593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.845614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.845642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.845664 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.948229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.948277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.948288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.948310 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:37 crc kubenswrapper[4779]: I0929 19:09:37.948354 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:37Z","lastTransitionTime":"2025-09-29T19:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.051035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.051368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.051503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.051645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.051826 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.155417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.155472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.155488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.155527 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.155545 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.258220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.258866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.259021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.259159 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.259287 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.363077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.363432 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.363632 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.363778 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.363923 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.466797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.466865 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.466876 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.466894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.466905 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.569400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.569460 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.569477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.569502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.569519 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.672054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.672090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.672098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.672112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.672120 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.765483 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.765625 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:38 crc kubenswrapper[4779]: E0929 19:09:38.765838 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.766092 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.766103 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:38 crc kubenswrapper[4779]: E0929 19:09:38.766262 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:38 crc kubenswrapper[4779]: E0929 19:09:38.766431 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:38 crc kubenswrapper[4779]: E0929 19:09:38.766550 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.775019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.775069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.775086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.775108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.775125 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.877855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.877922 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.877940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.877979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.878014 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.981070 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.981129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.981141 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.981158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:38 crc kubenswrapper[4779]: I0929 19:09:38.981170 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:38Z","lastTransitionTime":"2025-09-29T19:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.084270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.084364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.084382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.084401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.084416 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.187694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.187804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.187853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.187879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.187918 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.289954 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.290391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.290606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.290813 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.291039 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.394661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.394749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.394769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.394785 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.394797 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.496917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.497286 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.497470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.497603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.497769 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.600765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.600824 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.600840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.600864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.600883 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.704495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.705045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.705188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.705379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.705537 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.808717 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podStartSLOduration=70.808638073 podStartE2EDuration="1m10.808638073s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:39.802671643 +0000 UTC m=+90.687096833" watchObservedRunningTime="2025-09-29 19:09:39.808638073 +0000 UTC m=+90.693063183" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.812848 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.812917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.812935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.812955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.813001 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.863723 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=10.863700302 podStartE2EDuration="10.863700302s" podCreationTimestamp="2025-09-29 19:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:39.86100412 +0000 UTC m=+90.745429230" watchObservedRunningTime="2025-09-29 19:09:39.863700302 +0000 UTC m=+90.748125422" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.916384 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.916424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.916435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.916451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.916464 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:39Z","lastTransitionTime":"2025-09-29T19:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.953679 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jfbb6" podStartSLOduration=70.953663399 podStartE2EDuration="1m10.953663399s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:39.936706714 +0000 UTC m=+90.821131834" watchObservedRunningTime="2025-09-29 19:09:39.953663399 +0000 UTC m=+90.838088499" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.967104 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-cvx8m" podStartSLOduration=70.96708794 podStartE2EDuration="1m10.96708794s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:39.954409359 +0000 UTC m=+90.838834459" watchObservedRunningTime="2025-09-29 19:09:39.96708794 +0000 UTC m=+90.851513040" Sep 29 19:09:39 crc kubenswrapper[4779]: I0929 19:09:39.967410 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=65.967406108 podStartE2EDuration="1m5.967406108s" podCreationTimestamp="2025-09-29 19:08:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:39.967220513 +0000 UTC m=+90.851645613" watchObservedRunningTime="2025-09-29 19:09:39.967406108 +0000 UTC m=+90.851831208" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.019100 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.019133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.019143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.019156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.019168 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.031965 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=34.031949242 podStartE2EDuration="34.031949242s" podCreationTimestamp="2025-09-29 19:09:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:40.030921064 +0000 UTC m=+90.915346164" watchObservedRunningTime="2025-09-29 19:09:40.031949242 +0000 UTC m=+90.916374342" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.032099 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-zxpg4" podStartSLOduration=71.032094656 podStartE2EDuration="1m11.032094656s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:40.018265704 +0000 UTC m=+90.902690814" watchObservedRunningTime="2025-09-29 19:09:40.032094656 +0000 UTC m=+90.916519756" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.047779 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=70.047764737 podStartE2EDuration="1m10.047764737s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:40.047747386 +0000 UTC m=+90.932172486" watchObservedRunningTime="2025-09-29 19:09:40.047764737 +0000 UTC m=+90.932189837" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.071865 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-7hb2m" podStartSLOduration=71.071845574 podStartE2EDuration="1m11.071845574s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:40.070387954 +0000 UTC m=+90.954813074" watchObservedRunningTime="2025-09-29 19:09:40.071845574 +0000 UTC m=+90.956270694" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.121507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.121563 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.121581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.121602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.121617 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.224350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.224389 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.224398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.224412 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.224421 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.326575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.326639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.326648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.326661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.326669 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.428652 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.428695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.428712 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.428738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.428760 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.531831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.531892 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.531905 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.531932 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.531949 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.634455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.634495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.634507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.634519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.634528 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.737139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.737242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.737260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.737284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.737301 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.765889 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.765949 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.765950 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.765889 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:40 crc kubenswrapper[4779]: E0929 19:09:40.766072 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:40 crc kubenswrapper[4779]: E0929 19:09:40.766223 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:40 crc kubenswrapper[4779]: E0929 19:09:40.766305 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:40 crc kubenswrapper[4779]: E0929 19:09:40.766447 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.839197 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.839234 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.839245 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.839279 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.839289 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.942056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.942109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.942120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.942154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:40 crc kubenswrapper[4779]: I0929 19:09:40.942165 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:40Z","lastTransitionTime":"2025-09-29T19:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.045456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.045485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.045493 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.045506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.045515 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.147405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.147467 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.147490 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.147520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.147540 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.249581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.249634 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.249665 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.249678 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.249686 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.351728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.351770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.351782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.351799 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.351812 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.455028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.455063 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.455098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.455113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.455124 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.557188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.557215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.557222 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.557237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.557246 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.660608 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.660653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.660663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.660682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.660692 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.763372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.763434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.763443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.763459 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.763469 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.778800 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fvdtc" podStartSLOduration=71.778783164 podStartE2EDuration="1m11.778783164s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:40.10895498 +0000 UTC m=+90.993380100" watchObservedRunningTime="2025-09-29 19:09:41.778783164 +0000 UTC m=+92.663208264" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.779688 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.865813 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.865879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.865897 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.865920 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.865936 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.968857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.968902 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.968912 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.968931 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:41 crc kubenswrapper[4779]: I0929 19:09:41.968944 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:41Z","lastTransitionTime":"2025-09-29T19:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.071984 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.072017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.072028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.072041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.072050 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.174511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.174551 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.174562 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.174577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.174589 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.277075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.277118 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.277127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.277141 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.277152 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.379809 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.379874 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.379898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.379932 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.379959 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.483009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.483076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.483556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.483601 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.483621 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.587360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.587428 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.587445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.587468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.587484 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.690476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.690582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.690606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.690635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.690652 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.765816 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.765860 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.765923 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.766106 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:42 crc kubenswrapper[4779]: E0929 19:09:42.766195 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:42 crc kubenswrapper[4779]: E0929 19:09:42.766358 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:42 crc kubenswrapper[4779]: E0929 19:09:42.766444 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.766525 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:09:42 crc kubenswrapper[4779]: E0929 19:09:42.766623 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:42 crc kubenswrapper[4779]: E0929 19:09:42.766677 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.793074 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.793136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.793154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.793178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.793196 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.895575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.895624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.895635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.895651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.895661 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.999269 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.999360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.999379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.999401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:42 crc kubenswrapper[4779]: I0929 19:09:42.999419 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:42Z","lastTransitionTime":"2025-09-29T19:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.102440 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.102492 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.102503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.102520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.102535 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.205195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.205619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.205890 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.206681 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.206907 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.310098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.310194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.310220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.310258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.310287 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.413819 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.413880 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.413893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.413913 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.413930 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.516123 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.516164 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.516178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.516193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.516204 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.619412 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.619491 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.619505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.619528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.619543 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.722784 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.722843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.722860 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.722899 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.722917 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.826451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.826516 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.826538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.826562 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.826580 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.929953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.930005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.930018 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.930041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:43 crc kubenswrapper[4779]: I0929 19:09:43.930055 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:43Z","lastTransitionTime":"2025-09-29T19:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.033620 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.033669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.033682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.033697 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.033709 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.137477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.137556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.137577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.137606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.137626 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.240969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.241021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.241035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.241058 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.241073 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.344636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.344703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.344716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.344736 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.344748 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.448217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.448375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.448411 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.448446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.448467 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.551154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.551239 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.551258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.551345 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.551368 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.653960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.654011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.654022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.654046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.654060 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.757382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.757447 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.757462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.757486 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.757500 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.765709 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.765809 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.765837 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.765842 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:44 crc kubenswrapper[4779]: E0929 19:09:44.765920 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:44 crc kubenswrapper[4779]: E0929 19:09:44.766190 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:44 crc kubenswrapper[4779]: E0929 19:09:44.766268 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:44 crc kubenswrapper[4779]: E0929 19:09:44.766405 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.859347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.859406 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.859438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.859466 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.859485 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.962514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.962580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.962596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.962618 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:44 crc kubenswrapper[4779]: I0929 19:09:44.962639 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:44Z","lastTransitionTime":"2025-09-29T19:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.065711 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.065798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.065828 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.065857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.065878 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.173252 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.173923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.173953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.173973 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.173985 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.277118 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.277178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.277196 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.277219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.277236 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.380962 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.381032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.381056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.381085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.381107 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.483357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.483487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.483501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.483517 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.483528 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.585930 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.585979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.585990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.586008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.586020 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.688732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.688794 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.688812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.688836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.688853 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.791461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.791514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.791533 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.791554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.791571 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.894497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.894559 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.894576 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.894602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.894620 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.997899 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.997940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.997979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.997996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:45 crc kubenswrapper[4779]: I0929 19:09:45.998007 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:45Z","lastTransitionTime":"2025-09-29T19:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.101225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.101272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.101281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.101295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.101305 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.205565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.205629 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.205646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.205668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.205685 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.308174 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.308246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.308270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.308298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.308355 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.410673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.410724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.410749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.410763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.410772 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.514103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.514148 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.514159 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.514175 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.514186 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.617276 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.617562 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.617631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.617696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.617765 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.721993 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.722085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.722109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.722144 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.722168 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.765900 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.765964 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.765924 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:46 crc kubenswrapper[4779]: E0929 19:09:46.766071 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.766147 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:46 crc kubenswrapper[4779]: E0929 19:09:46.766448 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:46 crc kubenswrapper[4779]: E0929 19:09:46.766608 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:46 crc kubenswrapper[4779]: E0929 19:09:46.766674 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.825770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.825847 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.825861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.825882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.825896 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.928910 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.928977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.928993 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.929018 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:46 crc kubenswrapper[4779]: I0929 19:09:46.929045 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:46Z","lastTransitionTime":"2025-09-29T19:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.033479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.033550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.033574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.033600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.033618 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.136975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.137084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.137104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.137136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.137158 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.240730 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.240791 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.240807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.240830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.240847 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.343728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.343792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.343811 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.343836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.343856 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.446972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.447104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.447196 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.447222 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.447239 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.550655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.550730 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.550753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.550782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.550805 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.654201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.654255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.654268 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.654284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.654297 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.757986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.758037 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.758049 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.758072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.758086 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.862134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.862224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.862238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.862259 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.862271 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.965557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.965633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.965654 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.965680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:47 crc kubenswrapper[4779]: I0929 19:09:47.965697 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:47Z","lastTransitionTime":"2025-09-29T19:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.068435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.068503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.068524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.068550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.068570 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:48Z","lastTransitionTime":"2025-09-29T19:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.109229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.109694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.109826 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.109971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.110102 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T19:09:48Z","lastTransitionTime":"2025-09-29T19:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.165296 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv"] Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.165854 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.169831 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.170226 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.170939 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.172403 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.188777 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=7.188753127 podStartE2EDuration="7.188753127s" podCreationTimestamp="2025-09-29 19:09:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:48.188143091 +0000 UTC m=+99.072568201" watchObservedRunningTime="2025-09-29 19:09:48.188753127 +0000 UTC m=+99.073178227" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.208929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.209282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7428a07a-0e48-4bcc-a3aa-9afff981c682-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.209530 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7428a07a-0e48-4bcc-a3aa-9afff981c682-service-ca\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.209676 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7428a07a-0e48-4bcc-a3aa-9afff981c682-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.209856 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311286 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7428a07a-0e48-4bcc-a3aa-9afff981c682-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311363 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311396 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311422 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7428a07a-0e48-4bcc-a3aa-9afff981c682-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311479 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" 
(UniqueName: \"kubernetes.io/configmap/7428a07a-0e48-4bcc-a3aa-9afff981c682-service-ca\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.311999 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7428a07a-0e48-4bcc-a3aa-9afff981c682-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.312515 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7428a07a-0e48-4bcc-a3aa-9afff981c682-service-ca\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.320695 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7428a07a-0e48-4bcc-a3aa-9afff981c682-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.330519 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7428a07a-0e48-4bcc-a3aa-9afff981c682-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-x2xjv\" (UID: \"7428a07a-0e48-4bcc-a3aa-9afff981c682\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.484938 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.613910 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.614036 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.614100 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs podName:4df079c4-34e3-4132-91bb-ad68488552f8 nodeName:}" failed. 
No retries permitted until 2025-09-29 19:10:52.614085102 +0000 UTC m=+163.498510212 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs") pod "network-metrics-daemon-2rtwf" (UID: "4df079c4-34e3-4132-91bb-ad68488552f8") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.765641 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.765683 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.765734 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:48 crc kubenswrapper[4779]: I0929 19:09:48.765801 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.765877 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.765990 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.766063 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:48 crc kubenswrapper[4779]: E0929 19:09:48.766136 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:49 crc kubenswrapper[4779]: I0929 19:09:49.320737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" event={"ID":"7428a07a-0e48-4bcc-a3aa-9afff981c682","Type":"ContainerStarted","Data":"3888e52b8fb9aea43b9157699e5355870bae86179fa318a9e2cf8608d09f308e"} Sep 29 19:09:49 crc kubenswrapper[4779]: I0929 19:09:49.320833 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" event={"ID":"7428a07a-0e48-4bcc-a3aa-9afff981c682","Type":"ContainerStarted","Data":"b24d0f9a75da89b332199bd511b7b2dd6bb4ee3a319dd2dd0f31209b7955f559"} Sep 29 19:09:49 crc kubenswrapper[4779]: I0929 19:09:49.341682 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-x2xjv" podStartSLOduration=80.341657176 podStartE2EDuration="1m20.341657176s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:09:49.339350834 +0000 UTC m=+100.223775964" watchObservedRunningTime="2025-09-29 19:09:49.341657176 +0000 UTC m=+100.226082316" Sep 29 19:09:50 crc kubenswrapper[4779]: I0929 19:09:50.765996 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:50 crc kubenswrapper[4779]: E0929 19:09:50.766200 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:50 crc kubenswrapper[4779]: I0929 19:09:50.766871 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:50 crc kubenswrapper[4779]: E0929 19:09:50.766968 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:50 crc kubenswrapper[4779]: I0929 19:09:50.767138 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:50 crc kubenswrapper[4779]: E0929 19:09:50.767214 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:50 crc kubenswrapper[4779]: I0929 19:09:50.767364 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:50 crc kubenswrapper[4779]: E0929 19:09:50.767435 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:52 crc kubenswrapper[4779]: I0929 19:09:52.766240 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:52 crc kubenswrapper[4779]: I0929 19:09:52.766292 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:52 crc kubenswrapper[4779]: I0929 19:09:52.766258 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:52 crc kubenswrapper[4779]: I0929 19:09:52.766284 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:52 crc kubenswrapper[4779]: E0929 19:09:52.766622 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:52 crc kubenswrapper[4779]: E0929 19:09:52.766785 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:52 crc kubenswrapper[4779]: E0929 19:09:52.766860 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:52 crc kubenswrapper[4779]: E0929 19:09:52.767006 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:54 crc kubenswrapper[4779]: I0929 19:09:54.766089 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:54 crc kubenswrapper[4779]: I0929 19:09:54.766173 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:54 crc kubenswrapper[4779]: I0929 19:09:54.766240 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:54 crc kubenswrapper[4779]: E0929 19:09:54.766625 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:54 crc kubenswrapper[4779]: I0929 19:09:54.766123 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:54 crc kubenswrapper[4779]: E0929 19:09:54.766732 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:54 crc kubenswrapper[4779]: E0929 19:09:54.766929 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:54 crc kubenswrapper[4779]: E0929 19:09:54.767079 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:56 crc kubenswrapper[4779]: I0929 19:09:56.766073 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:56 crc kubenswrapper[4779]: I0929 19:09:56.766170 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:56 crc kubenswrapper[4779]: I0929 19:09:56.766183 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:56 crc kubenswrapper[4779]: E0929 19:09:56.766221 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:56 crc kubenswrapper[4779]: I0929 19:09:56.766100 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:56 crc kubenswrapper[4779]: E0929 19:09:56.766416 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:56 crc kubenswrapper[4779]: E0929 19:09:56.766441 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:56 crc kubenswrapper[4779]: E0929 19:09:56.766650 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:09:56 crc kubenswrapper[4779]: I0929 19:09:56.767285 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:09:56 crc kubenswrapper[4779]: E0929 19:09:56.767459 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:09:58 crc kubenswrapper[4779]: I0929 19:09:58.765849 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:09:58 crc kubenswrapper[4779]: I0929 19:09:58.765878 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:09:58 crc kubenswrapper[4779]: I0929 19:09:58.765932 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:09:58 crc kubenswrapper[4779]: I0929 19:09:58.765963 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:09:58 crc kubenswrapper[4779]: E0929 19:09:58.766921 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:09:58 crc kubenswrapper[4779]: E0929 19:09:58.767190 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:09:58 crc kubenswrapper[4779]: E0929 19:09:58.767556 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:09:58 crc kubenswrapper[4779]: E0929 19:09:58.767741 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:00 crc kubenswrapper[4779]: I0929 19:10:00.765091 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:00 crc kubenswrapper[4779]: I0929 19:10:00.765141 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:00 crc kubenswrapper[4779]: E0929 19:10:00.765220 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:00 crc kubenswrapper[4779]: I0929 19:10:00.765107 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:00 crc kubenswrapper[4779]: I0929 19:10:00.765168 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:00 crc kubenswrapper[4779]: E0929 19:10:00.765385 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:00 crc kubenswrapper[4779]: E0929 19:10:00.765437 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:00 crc kubenswrapper[4779]: E0929 19:10:00.765506 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:02 crc kubenswrapper[4779]: I0929 19:10:02.766043 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:02 crc kubenswrapper[4779]: I0929 19:10:02.766109 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:02 crc kubenswrapper[4779]: E0929 19:10:02.767666 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:02 crc kubenswrapper[4779]: I0929 19:10:02.766136 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:02 crc kubenswrapper[4779]: E0929 19:10:02.767765 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:02 crc kubenswrapper[4779]: I0929 19:10:02.766102 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:02 crc kubenswrapper[4779]: E0929 19:10:02.767831 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:02 crc kubenswrapper[4779]: E0929 19:10:02.767997 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:04 crc kubenswrapper[4779]: I0929 19:10:04.765361 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:04 crc kubenswrapper[4779]: I0929 19:10:04.765423 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:04 crc kubenswrapper[4779]: I0929 19:10:04.765532 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:04 crc kubenswrapper[4779]: E0929 19:10:04.765550 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:04 crc kubenswrapper[4779]: E0929 19:10:04.765721 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:04 crc kubenswrapper[4779]: E0929 19:10:04.765832 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:04 crc kubenswrapper[4779]: I0929 19:10:04.766117 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:04 crc kubenswrapper[4779]: E0929 19:10:04.766243 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.382737 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/1.log" Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.383287 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/0.log" Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.383368 4779 generic.go:334] "Generic (PLEG): container finished" podID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" containerID="0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d" exitCode=1 Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.383408 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerDied","Data":"0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d"} Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.383451 4779 scope.go:117] "RemoveContainer" containerID="2ea6959ae00f851559cc059498a3a7848fbbb55f6a25ed82e38efe62a7ee85ca" Sep 29 19:10:05 crc kubenswrapper[4779]: I0929 19:10:05.383903 4779 scope.go:117] "RemoveContainer" containerID="0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d" Sep 29 19:10:05 crc kubenswrapper[4779]: E0929 19:10:05.384134 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jfbb6_openshift-multus(3ac24bbf-c37a-4253-be71-8d8f15cfd48e)\"" pod="openshift-multus/multus-jfbb6" podUID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" Sep 29 19:10:06 crc kubenswrapper[4779]: I0929 19:10:06.389788 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/1.log" Sep 29 19:10:06 crc kubenswrapper[4779]: I0929 19:10:06.765738 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:06 crc kubenswrapper[4779]: I0929 19:10:06.765830 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:06 crc kubenswrapper[4779]: I0929 19:10:06.765751 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:06 crc kubenswrapper[4779]: E0929 19:10:06.765932 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:06 crc kubenswrapper[4779]: E0929 19:10:06.766078 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:06 crc kubenswrapper[4779]: I0929 19:10:06.766164 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:06 crc kubenswrapper[4779]: E0929 19:10:06.766241 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:06 crc kubenswrapper[4779]: E0929 19:10:06.766453 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:07 crc kubenswrapper[4779]: I0929 19:10:07.767009 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:10:07 crc kubenswrapper[4779]: E0929 19:10:07.767294 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-42vjg_openshift-ovn-kubernetes(046df2ef-fb75-4d32-93e6-17b36af0a7c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" Sep 29 19:10:08 crc kubenswrapper[4779]: I0929 19:10:08.765429 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:08 crc kubenswrapper[4779]: I0929 19:10:08.765495 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:08 crc kubenswrapper[4779]: E0929 19:10:08.765546 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:08 crc kubenswrapper[4779]: I0929 19:10:08.765562 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:08 crc kubenswrapper[4779]: I0929 19:10:08.765516 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:08 crc kubenswrapper[4779]: E0929 19:10:08.765632 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:08 crc kubenswrapper[4779]: E0929 19:10:08.765712 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:08 crc kubenswrapper[4779]: E0929 19:10:08.765773 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:09 crc kubenswrapper[4779]: E0929 19:10:09.747346 4779 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 19:10:09 crc kubenswrapper[4779]: E0929 19:10:09.861107 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 19:10:10 crc kubenswrapper[4779]: I0929 19:10:10.765121 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:10 crc kubenswrapper[4779]: I0929 19:10:10.765177 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:10 crc kubenswrapper[4779]: I0929 19:10:10.765267 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:10 crc kubenswrapper[4779]: E0929 19:10:10.765486 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:10 crc kubenswrapper[4779]: I0929 19:10:10.765574 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:10 crc kubenswrapper[4779]: E0929 19:10:10.765664 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:10 crc kubenswrapper[4779]: E0929 19:10:10.765848 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:10 crc kubenswrapper[4779]: E0929 19:10:10.766015 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:12 crc kubenswrapper[4779]: I0929 19:10:12.765557 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:12 crc kubenswrapper[4779]: I0929 19:10:12.765578 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:12 crc kubenswrapper[4779]: I0929 19:10:12.765578 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:12 crc kubenswrapper[4779]: I0929 19:10:12.765602 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:12 crc kubenswrapper[4779]: E0929 19:10:12.766722 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:12 crc kubenswrapper[4779]: E0929 19:10:12.766899 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:12 crc kubenswrapper[4779]: E0929 19:10:12.766966 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:12 crc kubenswrapper[4779]: E0929 19:10:12.767277 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:14 crc kubenswrapper[4779]: I0929 19:10:14.766099 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:14 crc kubenswrapper[4779]: I0929 19:10:14.766189 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:14 crc kubenswrapper[4779]: E0929 19:10:14.767398 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:14 crc kubenswrapper[4779]: I0929 19:10:14.766376 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:14 crc kubenswrapper[4779]: I0929 19:10:14.766277 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:14 crc kubenswrapper[4779]: E0929 19:10:14.767527 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:14 crc kubenswrapper[4779]: E0929 19:10:14.767650 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:14 crc kubenswrapper[4779]: E0929 19:10:14.767246 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:14 crc kubenswrapper[4779]: E0929 19:10:14.863258 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 19:10:16 crc kubenswrapper[4779]: I0929 19:10:16.765141 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:16 crc kubenswrapper[4779]: I0929 19:10:16.765261 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:16 crc kubenswrapper[4779]: I0929 19:10:16.765364 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:16 crc kubenswrapper[4779]: I0929 19:10:16.765459 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:16 crc kubenswrapper[4779]: I0929 19:10:16.765552 4779 scope.go:117] "RemoveContainer" containerID="0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d" Sep 29 19:10:16 crc kubenswrapper[4779]: E0929 19:10:16.765644 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:16 crc kubenswrapper[4779]: E0929 19:10:16.765780 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:16 crc kubenswrapper[4779]: E0929 19:10:16.765934 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:16 crc kubenswrapper[4779]: E0929 19:10:16.766125 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:17 crc kubenswrapper[4779]: I0929 19:10:17.439825 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/1.log" Sep 29 19:10:17 crc kubenswrapper[4779]: I0929 19:10:17.440587 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerStarted","Data":"5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5"} Sep 29 19:10:18 crc kubenswrapper[4779]: I0929 19:10:18.765656 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:18 crc kubenswrapper[4779]: I0929 19:10:18.765680 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:18 crc kubenswrapper[4779]: E0929 19:10:18.766539 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:18 crc kubenswrapper[4779]: I0929 19:10:18.765874 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:18 crc kubenswrapper[4779]: I0929 19:10:18.765717 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:18 crc kubenswrapper[4779]: E0929 19:10:18.766738 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:18 crc kubenswrapper[4779]: E0929 19:10:18.766811 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:18 crc kubenswrapper[4779]: E0929 19:10:18.766963 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:19 crc kubenswrapper[4779]: E0929 19:10:19.864016 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 19:10:20 crc kubenswrapper[4779]: I0929 19:10:20.765858 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:20 crc kubenswrapper[4779]: E0929 19:10:20.766049 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:20 crc kubenswrapper[4779]: I0929 19:10:20.766110 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:20 crc kubenswrapper[4779]: I0929 19:10:20.766220 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:20 crc kubenswrapper[4779]: E0929 19:10:20.766442 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:20 crc kubenswrapper[4779]: I0929 19:10:20.766633 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:20 crc kubenswrapper[4779]: E0929 19:10:20.767139 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:20 crc kubenswrapper[4779]: E0929 19:10:20.767348 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:20 crc kubenswrapper[4779]: I0929 19:10:20.767724 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:10:21 crc kubenswrapper[4779]: I0929 19:10:21.455717 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/3.log" Sep 29 19:10:21 crc kubenswrapper[4779]: I0929 19:10:21.459695 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerStarted","Data":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.270218 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2rtwf"] Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.271016 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:22 crc kubenswrapper[4779]: E0929 19:10:22.271144 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.463406 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.499302 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podStartSLOduration=113.49927958 podStartE2EDuration="1m53.49927958s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:22.498957358 +0000 UTC m=+133.383382488" watchObservedRunningTime="2025-09-29 19:10:22.49927958 +0000 UTC m=+133.383704690" Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.765519 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.765605 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:22 crc kubenswrapper[4779]: E0929 19:10:22.765697 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:22 crc kubenswrapper[4779]: I0929 19:10:22.765612 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:10:22 crc kubenswrapper[4779]: E0929 19:10:22.765865 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:10:22 crc kubenswrapper[4779]: E0929 19:10:22.765972 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:10:23 crc kubenswrapper[4779]: I0929 19:10:23.765880 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:10:23 crc kubenswrapper[4779]: E0929 19:10:23.766080 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
Sep 29 19:10:24 crc kubenswrapper[4779]: I0929 19:10:24.765987 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:24 crc kubenswrapper[4779]: I0929 19:10:24.766059 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:10:24 crc kubenswrapper[4779]: I0929 19:10:24.766018 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:10:24 crc kubenswrapper[4779]: E0929 19:10:24.766213 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 19:10:24 crc kubenswrapper[4779]: E0929 19:10:24.766379 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:10:24 crc kubenswrapper[4779]: E0929 19:10:24.766515 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:10:24 crc kubenswrapper[4779]: E0929 19:10:24.866613 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 29 19:10:25 crc kubenswrapper[4779]: I0929 19:10:25.765295 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:10:25 crc kubenswrapper[4779]: E0929 19:10:25.765572 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8"
Sep 29 19:10:26 crc kubenswrapper[4779]: I0929 19:10:26.766057 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:10:26 crc kubenswrapper[4779]: I0929 19:10:26.766177 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:26 crc kubenswrapper[4779]: I0929 19:10:26.766057 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:10:26 crc kubenswrapper[4779]: E0929 19:10:26.766371 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 19:10:26 crc kubenswrapper[4779]: E0929 19:10:26.766478 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 19:10:26 crc kubenswrapper[4779]: E0929 19:10:26.766595 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:27 crc kubenswrapper[4779]: I0929 19:10:27.765721 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:27 crc kubenswrapper[4779]: E0929 19:10:27.765911 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:28 crc kubenswrapper[4779]: I0929 19:10:28.765517 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:28 crc kubenswrapper[4779]: E0929 19:10:28.765720 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 19:10:28 crc kubenswrapper[4779]: I0929 19:10:28.765543 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:28 crc kubenswrapper[4779]: E0929 19:10:28.766036 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 19:10:28 crc kubenswrapper[4779]: I0929 19:10:28.766195 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:28 crc kubenswrapper[4779]: E0929 19:10:28.766355 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 19:10:29 crc kubenswrapper[4779]: I0929 19:10:29.765544 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:29 crc kubenswrapper[4779]: E0929 19:10:29.767459 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2rtwf" podUID="4df079c4-34e3-4132-91bb-ad68488552f8" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.765688 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.765702 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.765961 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.768898 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.769125 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.769238 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 19:10:30 crc kubenswrapper[4779]: I0929 19:10:30.769309 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 19:10:31 crc kubenswrapper[4779]: I0929 19:10:31.765774 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf" Sep 29 19:10:31 crc kubenswrapper[4779]: I0929 19:10:31.768157 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 19:10:31 crc kubenswrapper[4779]: I0929 19:10:31.768830 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.783616 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:38 crc kubenswrapper[4779]: E0929 19:10:38.783747 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:12:40.783722206 +0000 UTC m=+271.668147316 (durationBeforeRetry 2m2s). 
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.783986 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.784011 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.784034 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.784072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.785211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.789197 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.789943 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.790155 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.894369 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.908638 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.914453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.921865 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.971547 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.972282 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.972935 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.973295 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.980668 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jh4mm"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.981162 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.981206 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.981547 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.981584 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-jh4mm"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.981666 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.982359 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.982528 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.982560 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.982872 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.982999 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983133 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983200 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983290 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983420 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983574 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.983776 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.984158 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.989375 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.989946 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.994080 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-66qtp"]
Sep 29 19:10:38 crc kubenswrapper[4779]: I0929 19:10:38.994349 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.021934 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.022011 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.022253 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.022404 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.022744 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.023052 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bh65l"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.023410 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-hhcst"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.023604 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.023775 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.023785 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hhcst"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024126 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024212 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024413 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024593 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024637 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.024786 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.025041 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.025067 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.025796 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.026172 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nxttj"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.026635 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-nxttj"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027110 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027188 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027591 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027755 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027830 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027913 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.027939 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028024 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028084 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028172 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028243 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028313 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028596 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028739 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028841 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028882 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.029018 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.029123 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.029147 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.029431 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.028856 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.029668 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.030057 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.030650 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4vf95"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.030974 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.031154 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.031837 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.033045 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.035603 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.035732 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4vf95"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.035780 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.035873 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9tvpg"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.041661 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-4bzgw"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042011 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-d5hk2"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042356 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042379 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042391 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jh4mm"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042470 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.035911 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.043168 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.038245 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.043457 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.043739 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.039106 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.042951 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.044364 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.043127 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045095 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045165 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045225 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045402 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045649 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045719 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045723 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045132 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045834 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045919 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045997 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046068 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046068 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046071 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.044505 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.043494 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-4bzgw"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046329 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046397 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.045098 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046371 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046669 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046673 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046747 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046828 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046904 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046934 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.046972 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.047014 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.047045 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.047232 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.051396 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.052181 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.052558 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.068127 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.069690 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.069976 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.070123 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.071185 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.071436 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.071661 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.071972 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.077113 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.079491 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.091286 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nrx92"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.091672 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.091845 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.091976 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.092197 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.092424 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.092971 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.093183 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.093405 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.093960 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.093037 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ef5985-06f9-448f-ab77-7768c6b5face-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.094253 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73ef5985-06f9-448f-ab77-7768c6b5face-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.094387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jkcp\" (UniqueName: \"kubernetes.io/projected/73ef5985-06f9-448f-ab77-7768c6b5face-kube-api-access-8jkcp\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.095711 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.098801 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.100181 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.101033 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.106768 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.106964 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.107640 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.108876 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.111925 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.112779 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.113208 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.113928 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.117797 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.118376 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119085 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119095 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119157 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119539 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119563 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.119577 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.122604 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.123149 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.123623 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.123649 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.123682 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.124078 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.128005 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.128670 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.132285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.132660 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.132876 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.136844 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.142009 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.143248 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.159516 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.159959 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6g6c5"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.160235 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.160339 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-blwj9"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.160508 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.161234 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.161291 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-blwj9"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.161500 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.164465 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-d5hk2"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.165209 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.173844 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.175134 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.181441 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"]
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.195429 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.197899 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3490980a-3437-48fe-9cad-aaf6cf84cc16-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4jgn\" (UniqueName: \"kubernetes.io/projected/958cb75b-851b-4661-87e2-1eac8aa8e922-kube-api-access-h4jgn\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198233 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-dir\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198293 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198481 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3490980a-3437-48fe-9cad-aaf6cf84cc16-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3490980a-3437-48fe-9cad-aaf6cf84cc16-config\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198623 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/958cb75b-851b-4661-87e2-1eac8aa8e922-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198660 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6925\" (UniqueName: \"kubernetes.io/projected/92115ba3-0309-4777-94c9-8a19dbdfc276-kube-api-access-r6925\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198729 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.198765 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73ef5985-06f9-448f-ab77-7768c6b5face-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199256 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-encryption-config\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199302 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jkcp\" (UniqueName: \"kubernetes.io/projected/73ef5985-06f9-448f-ab77-7768c6b5face-kube-api-access-8jkcp\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199354 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-serving-cert\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 
29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199392 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-client\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199420 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ef5985-06f9-448f-ab77-7768c6b5face-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.199480 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-policies\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.201017 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73ef5985-06f9-448f-ab77-7768c6b5face-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.203916 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.209497 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bh65l"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.216496 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.217883 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73ef5985-06f9-448f-ab77-7768c6b5face-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.221737 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.227019 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nxttj"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.229763 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9tvpg"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.231424 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.234051 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.234819 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4vf95"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.235676 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.239512 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.245067 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.252057 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hhcst"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.253248 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.254246 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.255638 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-66qtp"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.256699 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.256702 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.257859 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.258915 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.259963 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nrx92"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.261117 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.262222 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.263670 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.264872 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.265834 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq"] Sep 29 19:10:39 
crc kubenswrapper[4779]: I0929 19:10:39.266939 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.268131 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.269408 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-q75j6"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.270166 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b57sr"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.270359 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.271152 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.271283 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.272275 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6g6c5"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.273350 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q75j6"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.274697 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.274868 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b57sr"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.276652 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vvp2h"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.277158 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.277648 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vvp2h"] Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.294969 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300095 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-encryption-config\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300144 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-serving-cert\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300183 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-client\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300208 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-policies\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300236 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3490980a-3437-48fe-9cad-aaf6cf84cc16-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300266 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4jgn\" (UniqueName: \"kubernetes.io/projected/958cb75b-851b-4661-87e2-1eac8aa8e922-kube-api-access-h4jgn\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300288 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-dir\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300358 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300485 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3490980a-3437-48fe-9cad-aaf6cf84cc16-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300512 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3490980a-3437-48fe-9cad-aaf6cf84cc16-config\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300534 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/958cb75b-851b-4661-87e2-1eac8aa8e922-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300583 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6925\" (UniqueName: \"kubernetes.io/projected/92115ba3-0309-4777-94c9-8a19dbdfc276-kube-api-access-r6925\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300606 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.300843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-dir\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.301248 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.302185 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.302575 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/92115ba3-0309-4777-94c9-8a19dbdfc276-audit-policies\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: W0929 19:10:39.302767 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-5f90070941446a193155265a719c18f01aa450c53b392ec84cf7ec307ecdc766 WatchSource:0}: Error finding container 5f90070941446a193155265a719c18f01aa450c53b392ec84cf7ec307ecdc766: Status 404 returned error can't find the container with id 5f90070941446a193155265a719c18f01aa450c53b392ec84cf7ec307ecdc766 Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.303690 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-serving-cert\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.304376 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-etcd-client\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.307018 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/92115ba3-0309-4777-94c9-8a19dbdfc276-encryption-config\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.314832 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.334838 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.355437 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.375086 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.395001 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.406775 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3490980a-3437-48fe-9cad-aaf6cf84cc16-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.414752 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 
19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.423055 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3490980a-3437-48fe-9cad-aaf6cf84cc16-config\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.434269 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.455937 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.476458 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.495764 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.532123 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5f90070941446a193155265a719c18f01aa450c53b392ec84cf7ec307ecdc766"} Sep 29 19:10:39 crc kubenswrapper[4779]: W0929 19:10:39.533292 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-60ca596e54408ec9a214f856dac74863f076764e23ecf1ee712878d508f9d604 WatchSource:0}: Error finding container 60ca596e54408ec9a214f856dac74863f076764e23ecf1ee712878d508f9d604: Status 404 returned error can't find the container with id 60ca596e54408ec9a214f856dac74863f076764e23ecf1ee712878d508f9d604 Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.535754 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: W0929 19:10:39.544988 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-6fcecdd41cbd3abba4537f7df4c59f8c306e9e58188007e1df2c651793e8dc0f WatchSource:0}: Error finding container 6fcecdd41cbd3abba4537f7df4c59f8c306e9e58188007e1df2c651793e8dc0f: Status 404 returned error can't find the container with id 6fcecdd41cbd3abba4537f7df4c59f8c306e9e58188007e1df2c651793e8dc0f Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.555539 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.574462 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.594910 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.615899 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.635605 4779 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-certs-default" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.655141 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.675047 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.696219 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.715502 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.734807 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.754671 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.775652 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.797058 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.815785 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.835302 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.850422 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/958cb75b-851b-4661-87e2-1eac8aa8e922-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.856271 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.875366 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.915459 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.935885 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.965586 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.976143 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" 
Sep 29 19:10:39 crc kubenswrapper[4779]: I0929 19:10:39.995480 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.016568 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.035568 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.056378 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.076986 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.096103 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.115425 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.133201 4779 request.go:700] Waited for 1.013382123s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.135628 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.154448 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.175574 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.195575 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.216078 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.235596 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.256188 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.276308 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.297285 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 
29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.327768 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.335607 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.356278 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.376462 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.396739 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.415446 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.435876 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.455645 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.474990 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.496587 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.515010 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.535220 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.539963 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5b9d239dc32efd1733ec245a502cc7be0b30d45dfa10ea79fda7dabce5532296"} Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.541728 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a54b85a2c9a27f4e87b6bc4e1b161796bd383b8f809cfea21cea2b398b68f2de"} Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.541771 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"6fcecdd41cbd3abba4537f7df4c59f8c306e9e58188007e1df2c651793e8dc0f"} Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.543904 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"817a8f02533bfd24ec9381cbe0ae83127473720459f2acfe63261e161f19f5ba"} Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.543928 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"60ca596e54408ec9a214f856dac74863f076764e23ecf1ee712878d508f9d604"} Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.544250 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.554848 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.574781 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.595015 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.615179 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.635308 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.656063 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.675956 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.696210 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.715790 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.735980 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.755097 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.775445 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.795860 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.815393 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.835196 4779 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.875816 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.879106 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jkcp\" (UniqueName: \"kubernetes.io/projected/73ef5985-06f9-448f-ab77-7768c6b5face-kube-api-access-8jkcp\") pod \"openshift-apiserver-operator-796bbdcf4f-8528j\" (UID: \"73ef5985-06f9-448f-ab77-7768c6b5face\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.896305 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.914604 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.935769 4779 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.955865 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.975677 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 19:10:40 crc kubenswrapper[4779]: I0929 19:10:40.994796 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.016261 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.035599 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.055334 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.092806 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6925\" (UniqueName: \"kubernetes.io/projected/92115ba3-0309-4777-94c9-8a19dbdfc276-kube-api-access-r6925\") pod \"apiserver-7bbb656c7d-lx98g\" (UID: \"92115ba3-0309-4777-94c9-8a19dbdfc276\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.119917 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4jgn\" (UniqueName: \"kubernetes.io/projected/958cb75b-851b-4661-87e2-1eac8aa8e922-kube-api-access-h4jgn\") pod \"multus-admission-controller-857f4d67dd-nrx92\" (UID: \"958cb75b-851b-4661-87e2-1eac8aa8e922\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.137103 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.139912 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.140873 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3490980a-3437-48fe-9cad-aaf6cf84cc16-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zkscm\" (UID: \"3490980a-3437-48fe-9cad-aaf6cf84cc16\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.153687 4779 request.go:700] Waited for 1.256445139s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/persistentvolumes/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.220887 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.220934 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.220961 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shpqc\" (UniqueName: \"kubernetes.io/projected/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-kube-api-access-shpqc\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.220989 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmhfc\" (UniqueName: \"kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221012 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221038 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc 
kubenswrapper[4779]: I0929 19:10:41.221061 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b4pp\" (UniqueName: \"kubernetes.io/projected/308f2878-9a95-4b6a-8d03-90c431c05c1f-kube-api-access-8b4pp\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221083 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221105 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqr2m\" (UniqueName: \"kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-image-import-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221641 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221696 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221745 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/308f2878-9a95-4b6a-8d03-90c431c05c1f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-config\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221805 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-images\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221826 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221862 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-428jl\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221888 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-default-certificate\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221912 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/336740f0-4088-499f-8090-b2c86ce4bf28-metrics-tls\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221955 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.221980 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222140 
4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222352 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222411 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222462 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222489 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222512 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222535 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-encryption-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222555 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-config\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222578 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222600 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222648 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222676 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdgxf\" (UniqueName: \"kubernetes.io/projected/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-kube-api-access-jdgxf\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: \"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222776 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-client\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222911 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fdf18ac4-14dd-4d17-9922-5db12fa07225-machine-approver-tls\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twng6\" (UniqueName: \"kubernetes.io/projected/9e558fcb-c770-4581-a192-0f396bab99c7-kube-api-access-twng6\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.222987 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223025 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223054 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223077 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-images\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223101 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223135 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzsjx\" (UniqueName: \"kubernetes.io/projected/6c791206-2b2a-4372-b1ba-dc98863e8dcd-kube-api-access-hzsjx\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223162 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-client\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223210 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: \"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223249 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223272 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223297 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-serving-cert\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223368 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2767e5e-6a38-4668-95f6-f677e298c6f8-service-ca-bundle\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223408 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308f2878-9a95-4b6a-8d03-90c431c05c1f-serving-cert\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223512 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 
19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223548 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnpf9\" (UniqueName: \"kubernetes.io/projected/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-kube-api-access-cnpf9\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223578 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223609 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223641 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-config\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223672 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223707 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-node-pullsecrets\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223737 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e558fcb-c770-4581-a192-0f396bab99c7-serving-cert\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223773 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223806 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223841 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-serving-cert\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.223873 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:41.723856497 +0000 UTC m=+152.608281707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.223901 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-proxy-tls\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224070 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-trusted-ca\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224124 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-serving-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224175 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224204 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224244 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmdhq\" (UniqueName: \"kubernetes.io/projected/336740f0-4088-499f-8090-b2c86ce4bf28-kube-api-access-tmdhq\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224275 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224301 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-config\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224359 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxmpw\" (UniqueName: \"kubernetes.io/projected/078d7a8c-aa40-42c3-b26e-30dd2a01cae0-kube-api-access-cxmpw\") pod \"downloads-7954f5f757-4vf95\" (UID: \"078d7a8c-aa40-42c3-b26e-30dd2a01cae0\") " pod="openshift-console/downloads-7954f5f757-4vf95" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224392 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224424 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224452 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224481 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wgpk\" (UniqueName: 
\"kubernetes.io/projected/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-kube-api-access-9wgpk\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224514 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ae0fa99-1f97-4233-953b-fd63f9a6a418-serving-cert\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224544 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224576 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224604 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224645 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx86b\" (UniqueName: \"kubernetes.io/projected/05826cf2-7094-46a7-a08f-8f39f5fb3520-kube-api-access-rx86b\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224686 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224711 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-metrics-certs\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224842 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frpt8\" (UniqueName: \"kubernetes.io/projected/b2767e5e-6a38-4668-95f6-f677e298c6f8-kube-api-access-frpt8\") pod 
\"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.224958 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-service-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225073 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225128 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225183 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225219 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225283 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225401 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p649g\" (UniqueName: 
\"kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225453 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf49f\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-kube-api-access-tf49f\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225479 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fprlh\" (UniqueName: \"kubernetes.io/projected/fdf18ac4-14dd-4d17-9922-5db12fa07225-kube-api-access-fprlh\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225502 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dcjg\" (UniqueName: \"kubernetes.io/projected/1ae0fa99-1f97-4233-953b-fd63f9a6a418-kube-api-access-6dcjg\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225529 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9n87\" (UniqueName: \"kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225556 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-stats-auth\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225589 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit-dir\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225637 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvn7f\" (UniqueName: \"kubernetes.io/projected/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-kube-api-access-nvn7f\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.225713 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-auth-proxy-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.326838 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.327014 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:41.826983896 +0000 UTC m=+152.711409026 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.327903 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-srv-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.327984 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-tmpfs\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328051 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-plugins-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328149 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-client\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: 
\"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328261 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328369 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328441 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-serving-cert\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328497 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2767e5e-6a38-4668-95f6-f677e298c6f8-service-ca-bundle\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328564 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdpjp\" (UniqueName: \"kubernetes.io/projected/72bf75a9-d335-40b3-ad88-bc72472d4256-kube-api-access-bdpjp\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328630 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-srv-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328669 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328739 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.328936 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28qm7\" (UniqueName: \"kubernetes.io/projected/9b795657-72fc-4b72-9186-f0dc24678b36-kube-api-access-28qm7\") pod \"migrator-59844c95c7-fpfpb\" (UID: \"9b795657-72fc-4b72-9186-f0dc24678b36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329013 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329086 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308f2878-9a95-4b6a-8d03-90c431c05c1f-serving-cert\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329125 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnpf9\" (UniqueName: \"kubernetes.io/projected/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-kube-api-access-cnpf9\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-csi-data-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329273 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-config\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329311 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329418 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-node-pullsecrets\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329494 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-registration-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329589 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329920 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.329954 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e558fcb-c770-4581-a192-0f396bab99c7-serving-cert\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330002 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2767e5e-6a38-4668-95f6-f677e298c6f8-service-ca-bundle\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-key\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330128 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbdqs\" (UniqueName: \"kubernetes.io/projected/935082cd-101b-44e0-8315-186d166a1b2a-kube-api-access-gbdqs\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.330233 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:41.830216975 +0000 UTC m=+152.714642185 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330678 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-config\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330738 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-node-pullsecrets\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.330736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.331911 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.331925 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.331944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgjmb\" (UniqueName: \"kubernetes.io/projected/0bf68edf-b009-46c6-bdc0-05c48967d5d6-kube-api-access-cgjmb\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.331984 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332027 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332070 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-trusted-ca\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332092 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332132 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-serving-cert\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332158 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-proxy-tls\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-serving-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332212 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332233 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: 
\"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332262 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-profile-collector-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w468h\" (UniqueName: \"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-kube-api-access-w468h\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332349 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmdhq\" (UniqueName: \"kubernetes.io/projected/336740f0-4088-499f-8090-b2c86ce4bf28-kube-api-access-tmdhq\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-trusted-ca\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332403 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332443 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-config\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332461 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxmpw\" (UniqueName: \"kubernetes.io/projected/078d7a8c-aa40-42c3-b26e-30dd2a01cae0-kube-api-access-cxmpw\") pod \"downloads-7954f5f757-4vf95\" (UID: \"078d7a8c-aa40-42c3-b26e-30dd2a01cae0\") " pod="openshift-console/downloads-7954f5f757-4vf95" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332480 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc 
kubenswrapper[4779]: I0929 19:10:41.332519 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a64632ed-ba47-4cfb-96a9-349b32995c3a-metrics-tls\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332541 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332563 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/014a4efc-c5f5-42c9-b90f-557a51659e67-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332601 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-mountpoint-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332618 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ae0fa99-1f97-4233-953b-fd63f9a6a418-serving-cert\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332675 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332693 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wgpk\" (UniqueName: \"kubernetes.io/projected/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-kube-api-access-9wgpk\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332710 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/014a4efc-c5f5-42c9-b90f-557a51659e67-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332776 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332792 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332832 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx86b\" (UniqueName: \"kubernetes.io/projected/05826cf2-7094-46a7-a08f-8f39f5fb3520-kube-api-access-rx86b\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332875 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332928 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttbsb\" (UniqueName: \"kubernetes.io/projected/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-kube-api-access-ttbsb\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332955 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-metrics-certs\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333012 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frpt8\" (UniqueName: \"kubernetes.io/projected/b2767e5e-6a38-4668-95f6-f677e298c6f8-kube-api-access-frpt8\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " 
pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333048 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-service-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf68edf-b009-46c6-bdc0-05c48967d5d6-serving-cert\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4ztn\" (UniqueName: \"kubernetes.io/projected/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-kube-api-access-x4ztn\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333167 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333186 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b55hr\" (UniqueName: \"kubernetes.io/projected/44f9f157-a816-4fd6-aff5-eccc66abb454-kube-api-access-b55hr\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333223 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-socket-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: 
I0929 19:10:41.333258 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333276 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333292 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-certs\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333330 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333348 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dcjg\" (UniqueName: \"kubernetes.io/projected/1ae0fa99-1f97-4233-953b-fd63f9a6a418-kube-api-access-6dcjg\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333363 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p649g\" (UniqueName: \"kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333429 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf49f\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-kube-api-access-tf49f\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333447 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-fprlh\" (UniqueName: \"kubernetes.io/projected/fdf18ac4-14dd-4d17-9922-5db12fa07225-kube-api-access-fprlh\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333463 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9n87\" (UniqueName: \"kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333479 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/72bf75a9-d335-40b3-ad88-bc72472d4256-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333498 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-stats-auth\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333515 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit-dir\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333535 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvn7f\" (UniqueName: \"kubernetes.io/projected/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-kube-api-access-nvn7f\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333551 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-node-bootstrap-token\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333567 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-metrics-tls\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333583 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-auth-proxy-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333636 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333850 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ae0fa99-1f97-4233-953b-fd63f9a6a418-trusted-ca\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.333958 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334226 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf68edf-b009-46c6-bdc0-05c48967d5d6-config\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334255 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shpqc\" (UniqueName: \"kubernetes.io/projected/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-kube-api-access-shpqc\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334272 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmhfc\" (UniqueName: \"kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334433 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334456 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert\") 
pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334498 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334528 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a4efc-c5f5-42c9-b90f-557a51659e67-config\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334543 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-cabundle\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334558 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhdnc\" (UniqueName: \"kubernetes.io/projected/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-kube-api-access-zhdnc\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334576 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfhlk\" (UniqueName: \"kubernetes.io/projected/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-kube-api-access-kfhlk\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334592 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5h6j\" (UniqueName: \"kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334615 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334631 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b4pp\" (UniqueName: 
\"kubernetes.io/projected/308f2878-9a95-4b6a-8d03-90c431c05c1f-kube-api-access-8b4pp\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334649 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqr2m\" (UniqueName: \"kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334666 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-image-import-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334683 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334700 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334715 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334735 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a64632ed-ba47-4cfb-96a9-349b32995c3a-config-volume\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334753 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/308f2878-9a95-4b6a-8d03-90c431c05c1f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334769 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-config\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334786 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-images\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334802 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334817 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-428jl\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334832 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-default-certificate\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334848 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334863 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/336740f0-4088-499f-8090-b2c86ce4bf28-metrics-tls\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334878 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334894 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334911 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7lxg\" (UniqueName: \"kubernetes.io/projected/1209880f-ab8c-4964-8796-81bc1fdf803b-kube-api-access-w7lxg\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334937 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7gfn\" (UniqueName: \"kubernetes.io/projected/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-kube-api-access-m7gfn\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334943 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-serving-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334952 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.334990 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335007 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335148 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335339 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/1209880f-ab8c-4964-8796-81bc1fdf803b-proxy-tls\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335391 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1209880f-ab8c-4964-8796-81bc1fdf803b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335452 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dpgg\" (UniqueName: \"kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335501 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335569 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335594 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-encryption-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir\") 
pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335628 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-config\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335645 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335662 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335679 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335697 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335718 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335755 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfpjt\" (UniqueName: \"kubernetes.io/projected/dc132003-e19f-4bdc-b77f-69dbe408b68f-kube-api-access-hfpjt\") pod \"csi-hostpathplugin-b57sr\" (UID: 
\"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335774 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdgxf\" (UniqueName: \"kubernetes.io/projected/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-kube-api-access-jdgxf\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: \"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335794 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-client\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335812 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fdf18ac4-14dd-4d17-9922-5db12fa07225-machine-approver-tls\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twng6\" (UniqueName: \"kubernetes.io/projected/9e558fcb-c770-4581-a192-0f396bab99c7-kube-api-access-twng6\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.335857 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.336115 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-serving-cert\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.336375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-cert\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337105 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337192 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6pwz\" (UniqueName: \"kubernetes.io/projected/a64632ed-ba47-4cfb-96a9-349b32995c3a-kube-api-access-w6pwz\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337215 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337234 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzsjx\" (UniqueName: \"kubernetes.io/projected/6c791206-2b2a-4372-b1ba-dc98863e8dcd-kube-api-access-hzsjx\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337277 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.337304 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-images\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.338161 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-etcd-client\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.338945 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.339442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-serving-cert\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " 
pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.339652 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: \"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.339669 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.332930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.336839 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-proxy-tls\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.340286 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.341009 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-images\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.341206 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.342067 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308f2878-9a95-4b6a-8d03-90c431c05c1f-serving-cert\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.342178 
4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.342520 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-trusted-ca-bundle\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.342558 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit-dir\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.343110 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/336740f0-4088-499f-8090-b2c86ce4bf28-metrics-tls\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.343450 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-auth-proxy-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.343645 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-audit\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.344190 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.344461 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.345333 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.345616 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/308f2878-9a95-4b6a-8d03-90c431c05c1f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.346166 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.346372 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-default-certificate\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.346700 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-service-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.346866 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdf18ac4-14dd-4d17-9922-5db12fa07225-config\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.346868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.347806 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-images\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.348300 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-config\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.348307 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ae0fa99-1f97-4233-953b-fd63f9a6a418-serving-cert\") pod 
\"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.348409 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.348432 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.348956 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-stats-auth\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.349145 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.349196 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.349473 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.349784 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2767e5e-6a38-4668-95f6-f677e298c6f8-metrics-certs\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.349973 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.350448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-config\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.350624 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.350673 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.350693 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e558fcb-c770-4581-a192-0f396bab99c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.351223 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.351909 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.352450 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-client\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.352491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e558fcb-c770-4581-a192-0f396bab99c7-serving-cert\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.353255 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.353355 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.353535 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.353568 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.353891 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354054 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354093 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354516 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05826cf2-7094-46a7-a08f-8f39f5fb3520-image-import-ca\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354684 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6c791206-2b2a-4372-b1ba-dc98863e8dcd-etcd-ca\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-config\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.354733 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355237 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355457 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355483 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355798 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/05826cf2-7094-46a7-a08f-8f39f5fb3520-encryption-config\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.355863 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: W0929 19:10:41.356075 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73ef5985_06f9_448f_ab77_7768c6b5face.slice/crio-73828b439344ae2973e48609b25f705ac53221b035b2abf2c5aef1e08880f3b3 WatchSource:0}: Error finding container 73828b439344ae2973e48609b25f705ac53221b035b2abf2c5aef1e08880f3b3: Status 404 returned error can't find the container with id 73828b439344ae2973e48609b25f705ac53221b035b2abf2c5aef1e08880f3b3 Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.356895 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fdf18ac4-14dd-4d17-9922-5db12fa07225-machine-approver-tls\") pod \"machine-approver-56656f9798-n7hnm\" (UID: 
\"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.357080 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.357604 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.357846 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.358117 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.370180 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnpf9\" (UniqueName: \"kubernetes.io/projected/0ab1562e-a39e-4ddf-95ee-cf6ff520883d-kube-api-access-cnpf9\") pod \"machine-api-operator-5694c8668f-66qtp\" (UID: \"0ab1562e-a39e-4ddf-95ee-cf6ff520883d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.379092 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-nrx92"] Sep 29 19:10:41 crc kubenswrapper[4779]: W0929 19:10:41.386020 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod958cb75b_851b_4661_87e2_1eac8aa8e922.slice/crio-1e48207a1288b2b89b4b80d6261fecc6ebf09cd1c27603c46d5fe2b932f8e0df WatchSource:0}: Error finding container 1e48207a1288b2b89b4b80d6261fecc6ebf09cd1c27603c46d5fe2b932f8e0df: Status 404 returned error can't find the container with id 1e48207a1288b2b89b4b80d6261fecc6ebf09cd1c27603c46d5fe2b932f8e0df Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.389682 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf49f\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-kube-api-access-tf49f\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.392048 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.410460 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx86b\" (UniqueName: \"kubernetes.io/projected/05826cf2-7094-46a7-a08f-8f39f5fb3520-kube-api-access-rx86b\") pod \"apiserver-76f77b778f-nxttj\" (UID: \"05826cf2-7094-46a7-a08f-8f39f5fb3520\") " pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.430736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd9b8ba1-513d-4c37-aba0-ce6856f5f55c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-bc7qj\" (UID: \"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439272 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439506 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdpjp\" (UniqueName: \"kubernetes.io/projected/72bf75a9-d335-40b3-ad88-bc72472d4256-kube-api-access-bdpjp\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439533 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-srv-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28qm7\" (UniqueName: \"kubernetes.io/projected/9b795657-72fc-4b72-9186-f0dc24678b36-kube-api-access-28qm7\") pod \"migrator-59844c95c7-fpfpb\" (UID: \"9b795657-72fc-4b72-9186-f0dc24678b36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439574 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-csi-data-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439592 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-registration-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439619 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439634 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-key\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439649 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbdqs\" (UniqueName: \"kubernetes.io/projected/935082cd-101b-44e0-8315-186d166a1b2a-kube-api-access-gbdqs\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439665 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgjmb\" (UniqueName: \"kubernetes.io/projected/0bf68edf-b009-46c6-bdc0-05c48967d5d6-kube-api-access-cgjmb\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439680 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439696 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439715 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-profile-collector-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439749 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w468h\" (UniqueName: 
\"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-kube-api-access-w468h\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439769 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-trusted-ca\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439790 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a64632ed-ba47-4cfb-96a9-349b32995c3a-metrics-tls\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439806 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/014a4efc-c5f5-42c9-b90f-557a51659e67-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439821 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-mountpoint-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.439878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-mountpoint-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.439884 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:41.939858124 +0000 UTC m=+152.824283284 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440168 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440212 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/014a4efc-c5f5-42c9-b90f-557a51659e67-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440242 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttbsb\" (UniqueName: \"kubernetes.io/projected/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-kube-api-access-ttbsb\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf68edf-b009-46c6-bdc0-05c48967d5d6-serving-cert\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440338 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4ztn\" (UniqueName: \"kubernetes.io/projected/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-kube-api-access-x4ztn\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440366 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b55hr\" (UniqueName: \"kubernetes.io/projected/44f9f157-a816-4fd6-aff5-eccc66abb454-kube-api-access-b55hr\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440388 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-socket-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440417 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" 
(UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-certs\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440495 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/72bf75a9-d335-40b3-ad88-bc72472d4256-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440529 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-node-bootstrap-token\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440556 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-metrics-tls\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf68edf-b009-46c6-bdc0-05c48967d5d6-config\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440623 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-cabundle\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440646 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhdnc\" (UniqueName: \"kubernetes.io/projected/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-kube-api-access-zhdnc\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a4efc-c5f5-42c9-b90f-557a51659e67-config\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440707 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfhlk\" (UniqueName: 
\"kubernetes.io/projected/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-kube-api-access-kfhlk\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440731 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5h6j\" (UniqueName: \"kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440758 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440784 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a64632ed-ba47-4cfb-96a9-349b32995c3a-config-volume\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7lxg\" (UniqueName: \"kubernetes.io/projected/1209880f-ab8c-4964-8796-81bc1fdf803b-kube-api-access-w7lxg\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440859 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440884 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7gfn\" (UniqueName: \"kubernetes.io/projected/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-kube-api-access-m7gfn\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440914 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1209880f-ab8c-4964-8796-81bc1fdf803b-proxy-tls\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440937 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dpgg\" (UniqueName: \"kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.440967 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1209880f-ab8c-4964-8796-81bc1fdf803b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441004 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441027 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441053 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfpjt\" (UniqueName: \"kubernetes.io/projected/dc132003-e19f-4bdc-b77f-69dbe408b68f-kube-api-access-hfpjt\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441094 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-cert\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441128 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6pwz\" (UniqueName: \"kubernetes.io/projected/a64632ed-ba47-4cfb-96a9-349b32995c3a-kube-api-access-w6pwz\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441160 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441188 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-srv-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441210 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-tmpfs\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441233 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-plugins-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441658 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-plugins-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.441728 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-socket-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.443525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/014a4efc-c5f5-42c9-b90f-557a51659e67-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.443990 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.445218 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-certs\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.446156 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bf68edf-b009-46c6-bdc0-05c48967d5d6-serving-cert\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.446380 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-csi-data-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.446560 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-profile-collector-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.447608 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-trusted-ca\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.447712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.447845 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/935082cd-101b-44e0-8315-186d166a1b2a-srv-cert\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.448048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dc132003-e19f-4bdc-b77f-69dbe408b68f-registration-dir\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.448614 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a64632ed-ba47-4cfb-96a9-349b32995c3a-config-volume\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.449103 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bf68edf-b009-46c6-bdc0-05c48967d5d6-config\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.449256 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/72bf75a9-d335-40b3-ad88-bc72472d4256-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.449474 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.450470 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a64632ed-ba47-4cfb-96a9-349b32995c3a-metrics-tls\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.451106 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-cabundle\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.452820 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.453690 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.453737 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-signing-key\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.454112 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-metrics-tls\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.454462 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-tmpfs\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.454712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1209880f-ab8c-4964-8796-81bc1fdf803b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.455013 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.455049 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-cert\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.455817 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmdhq\" (UniqueName: \"kubernetes.io/projected/336740f0-4088-499f-8090-b2c86ce4bf28-kube-api-access-tmdhq\") pod \"dns-operator-744455d44c-9tvpg\" (UID: \"336740f0-4088-499f-8090-b2c86ce4bf28\") " pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.456868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.457204 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.457446 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-node-bootstrap-token\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.457595 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-webhook-cert\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.457830 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44f9f157-a816-4fd6-aff5-eccc66abb454-srv-cert\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.457526 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1209880f-ab8c-4964-8796-81bc1fdf803b-proxy-tls\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.472770 4779 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/25ae8232-dd23-4bbd-bae1-76ab7f9ce10c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-z2jm2\" (UID: \"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.491776 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvn7f\" (UniqueName: \"kubernetes.io/projected/118f9ff6-7433-41ee-a6bd-84e14fd1ea98-kube-api-access-nvn7f\") pod \"cluster-samples-operator-665b6dd947-ksgbj\" (UID: \"118f9ff6-7433-41ee-a6bd-84e14fd1ea98\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.499204 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a4efc-c5f5-42c9-b90f-557a51659e67-config\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.509751 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shpqc\" (UniqueName: \"kubernetes.io/projected/b3f6a87f-c596-4bfd-9330-13b5b8fabfe4-kube-api-access-shpqc\") pod \"machine-config-operator-74547568cd-px8c9\" (UID: \"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.528300 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.528906 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmhfc\" (UniqueName: \"kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc\") pod \"controller-manager-879f6c89f-qlvq5\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.542079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.542174 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.542622 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.042608939 +0000 UTC m=+152.927034039 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.547888 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frpt8\" (UniqueName: \"kubernetes.io/projected/b2767e5e-6a38-4668-95f6-f677e298c6f8-kube-api-access-frpt8\") pod \"router-default-5444994796-4bzgw\" (UID: \"b2767e5e-6a38-4668-95f6-f677e298c6f8\") " pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.549919 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" event={"ID":"92115ba3-0309-4777-94c9-8a19dbdfc276","Type":"ContainerStarted","Data":"6d86ef2c347a22256c51914fc2e95b14884b6dd1bd1e5048e3cad6e7814a5b3d"} Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.552951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" event={"ID":"73ef5985-06f9-448f-ab77-7768c6b5face","Type":"ContainerStarted","Data":"73828b439344ae2973e48609b25f705ac53221b035b2abf2c5aef1e08880f3b3"} Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.557063 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" event={"ID":"958cb75b-851b-4661-87e2-1eac8aa8e922","Type":"ContainerStarted","Data":"1e48207a1288b2b89b4b80d6261fecc6ebf09cd1c27603c46d5fe2b932f8e0df"} Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.568441 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzsjx\" (UniqueName: \"kubernetes.io/projected/6c791206-2b2a-4372-b1ba-dc98863e8dcd-kube-api-access-hzsjx\") pod \"etcd-operator-b45778765-d5hk2\" (UID: \"6c791206-2b2a-4372-b1ba-dc98863e8dcd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.585552 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.588951 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fprlh\" (UniqueName: \"kubernetes.io/projected/fdf18ac4-14dd-4d17-9922-5db12fa07225-kube-api-access-fprlh\") pod \"machine-approver-56656f9798-n7hnm\" (UID: \"fdf18ac4-14dd-4d17-9922-5db12fa07225\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.600871 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" Sep 29 19:10:41 crc kubenswrapper[4779]: W0929 19:10:41.608220 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3490980a_3437_48fe_9cad_aaf6cf84cc16.slice/crio-d10042b177dea9afd86583169dcb08fdd2ff771b039aee223dd3287031392099 WatchSource:0}: Error finding container d10042b177dea9afd86583169dcb08fdd2ff771b039aee223dd3287031392099: Status 404 returned error can't find the container with id d10042b177dea9afd86583169dcb08fdd2ff771b039aee223dd3287031392099 Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.619539 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.620673 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.638018 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.639703 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twng6\" (UniqueName: \"kubernetes.io/projected/9e558fcb-c770-4581-a192-0f396bab99c7-kube-api-access-twng6\") pod \"authentication-operator-69f744f599-bh65l\" (UID: \"9e558fcb-c770-4581-a192-0f396bab99c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.643020 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.643173 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.143145041 +0000 UTC m=+153.027570141 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.643442 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.643899 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.143891589 +0000 UTC m=+153.028316689 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.661572 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9n87\" (UniqueName: \"kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87\") pod \"oauth-openshift-558db77b4-pjjch\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.662654 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.674797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dcjg\" (UniqueName: \"kubernetes.io/projected/1ae0fa99-1f97-4233-953b-fd63f9a6a418-kube-api-access-6dcjg\") pod \"console-operator-58897d9998-jh4mm\" (UID: \"1ae0fa99-1f97-4233-953b-fd63f9a6a418\") " pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.683730 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.691556 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b4pp\" (UniqueName: \"kubernetes.io/projected/308f2878-9a95-4b6a-8d03-90c431c05c1f-kube-api-access-8b4pp\") pod \"openshift-config-operator-7777fb866f-lzrjb\" (UID: \"308f2878-9a95-4b6a-8d03-90c431c05c1f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.702083 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.705022 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.709732 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p649g\" (UniqueName: \"kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g\") pod \"route-controller-manager-6576b87f9c-s9rfn\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.718191 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.725329 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.725786 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.731208 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdgxf\" (UniqueName: \"kubernetes.io/projected/e9fff169-c8a1-4062-9a2a-ab4c1e790c07-kube-api-access-jdgxf\") pod \"control-plane-machine-set-operator-78cbb6b69f-fcw8k\" (UID: \"e9fff169-c8a1-4062-9a2a-ab4c1e790c07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.731604 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.746252 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.746400 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.246376904 +0000 UTC m=+153.130802004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.746811 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.747369 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.24735304 +0000 UTC m=+153.131778140 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.750899 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-428jl\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.778728 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.778761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqr2m\" (UniqueName: \"kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m\") pod \"console-f9d7485db-hhcst\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.791987 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.793164 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wgpk\" (UniqueName: \"kubernetes.io/projected/7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134-kube-api-access-9wgpk\") pod \"openshift-controller-manager-operator-756b6f6bc6-85d4c\" (UID: \"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.793271 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-66qtp"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.808016 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.811923 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.814994 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.837526 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxmpw\" (UniqueName: \"kubernetes.io/projected/078d7a8c-aa40-42c3-b26e-30dd2a01cae0-kube-api-access-cxmpw\") pod \"downloads-7954f5f757-4vf95\" (UID: \"078d7a8c-aa40-42c3-b26e-30dd2a01cae0\") " pod="openshift-console/downloads-7954f5f757-4vf95" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.850163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.850489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdpjp\" (UniqueName: \"kubernetes.io/projected/72bf75a9-d335-40b3-ad88-bc72472d4256-kube-api-access-bdpjp\") pod \"package-server-manager-789f6589d5-h4zfh\" (UID: \"72bf75a9-d335-40b3-ad88-bc72472d4256\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.850659 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.350639454 +0000 UTC m=+153.235064554 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.869283 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/014a4efc-c5f5-42c9-b90f-557a51659e67-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rrmfq\" (UID: \"014a4efc-c5f5-42c9-b90f-557a51659e67\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.878219 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.887789 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.890342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b55hr\" (UniqueName: \"kubernetes.io/projected/44f9f157-a816-4fd6-aff5-eccc66abb454-kube-api-access-b55hr\") pod \"olm-operator-6b444d44fb-kdxj8\" (UID: \"44f9f157-a816-4fd6-aff5-eccc66abb454\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.923329 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttbsb\" (UniqueName: \"kubernetes.io/projected/e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8-kube-api-access-ttbsb\") pod \"service-ca-9c57cc56f-6g6c5\" (UID: \"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.937791 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.939334 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-nxttj"] Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.942868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.948024 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.952072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:41 crc kubenswrapper[4779]: E0929 19:10:41.952462 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.452448934 +0000 UTC m=+153.336874034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.961537 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.964516 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4vf95" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.968411 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4ztn\" (UniqueName: \"kubernetes.io/projected/0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7-kube-api-access-x4ztn\") pod \"machine-config-server-blwj9\" (UID: \"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7\") " pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.973430 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28qm7\" (UniqueName: \"kubernetes.io/projected/9b795657-72fc-4b72-9186-f0dc24678b36-kube-api-access-28qm7\") pod \"migrator-59844c95c7-fpfpb\" (UID: \"9b795657-72fc-4b72-9186-f0dc24678b36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" Sep 29 19:10:41 crc kubenswrapper[4779]: I0929 19:10:41.994095 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7gfn\" (UniqueName: \"kubernetes.io/projected/d6ea9901-9f72-4641-b427-15ae8e7a5bfa-kube-api-access-m7gfn\") pod \"ingress-canary-vvp2h\" (UID: \"d6ea9901-9f72-4641-b427-15ae8e7a5bfa\") " pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.007943 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5h6j\" (UniqueName: \"kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j\") pod \"marketplace-operator-79b997595-bt79h\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.016781 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-blwj9" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.026751 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-d5hk2"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.034227 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbdqs\" (UniqueName: \"kubernetes.io/projected/935082cd-101b-44e0-8315-186d166a1b2a-kube-api-access-gbdqs\") pod \"catalog-operator-68c6474976-6js75\" (UID: \"935082cd-101b-44e0-8315-186d166a1b2a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.052539 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.052820 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.053224 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.553209195 +0000 UTC m=+153.437634295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.053350 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.053832 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.553821118 +0000 UTC m=+153.438246218 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.070654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7lxg\" (UniqueName: \"kubernetes.io/projected/1209880f-ab8c-4964-8796-81bc1fdf803b-kube-api-access-w7lxg\") pod \"machine-config-controller-84d6567774-cbd6x\" (UID: \"1209880f-ab8c-4964-8796-81bc1fdf803b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.071671 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfhlk\" (UniqueName: \"kubernetes.io/projected/7a54027e-55dc-42f9-aa21-1c82ec4d1b4b-kube-api-access-kfhlk\") pod \"packageserver-d55dfcdfc-wsbvn\" (UID: \"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.095380 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vvp2h" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.097080 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhdnc\" (UniqueName: \"kubernetes.io/projected/01f30d6b-1381-4cd8-9a61-ed94d536d2a2-kube-api-access-zhdnc\") pod \"kube-storage-version-migrator-operator-b67b599dd-2n6g9\" (UID: \"01f30d6b-1381-4cd8-9a61-ed94d536d2a2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.109858 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w468h\" (UniqueName: \"kubernetes.io/projected/8f956c33-ffd9-4517-b8d2-febf5b12f2d0-kube-api-access-w468h\") pod \"ingress-operator-5b745b69d9-hr49g\" (UID: \"8f956c33-ffd9-4517-b8d2-febf5b12f2d0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.119116 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.132048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6pwz\" (UniqueName: \"kubernetes.io/projected/a64632ed-ba47-4cfb-96a9-349b32995c3a-kube-api-access-w6pwz\") pod \"dns-default-q75j6\" (UID: \"a64632ed-ba47-4cfb-96a9-349b32995c3a\") " pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.154064 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.154342 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.654328529 +0000 UTC m=+153.538753629 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.160194 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.171079 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.173801 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.179289 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dpgg\" (UniqueName: \"kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg\") pod \"collect-profiles-29319540-c6vdh\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.182898 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgjmb\" (UniqueName: \"kubernetes.io/projected/0bf68edf-b009-46c6-bdc0-05c48967d5d6-kube-api-access-cgjmb\") pod \"service-ca-operator-777779d784-s7xfz\" (UID: \"0bf68edf-b009-46c6-bdc0-05c48967d5d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.187157 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.191928 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfpjt\" (UniqueName: \"kubernetes.io/projected/dc132003-e19f-4bdc-b77f-69dbe408b68f-kube-api-access-hfpjt\") pod \"csi-hostpathplugin-b57sr\" (UID: \"dc132003-e19f-4bdc-b77f-69dbe408b68f\") " pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.196850 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.201623 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.207957 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" Sep 29 19:10:42 crc kubenswrapper[4779]: W0929 19:10:42.208249 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c791206_2b2a_4372_b1ba_dc98863e8dcd.slice/crio-2ac8181128a8d2ab598985ab84349aa9e5f678eaca86f4e0d2e7261cfbad9c75 WatchSource:0}: Error finding container 2ac8181128a8d2ab598985ab84349aa9e5f678eaca86f4e0d2e7261cfbad9c75: Status 404 returned error can't find the container with id 2ac8181128a8d2ab598985ab84349aa9e5f678eaca86f4e0d2e7261cfbad9c75 Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.216914 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.225245 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.237271 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.256279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.257025 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.757011721 +0000 UTC m=+153.641436821 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.310792 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.357455 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.357536 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.857511243 +0000 UTC m=+153.741936333 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.357746 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.358044 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.858032522 +0000 UTC m=+153.742457622 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.366735 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.387775 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.458601 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.458876 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.958825424 +0000 UTC m=+153.843250524 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.459088 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.459433 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:42.959422486 +0000 UTC m=+153.843847586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.559991 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.560787 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.060762659 +0000 UTC m=+153.945187759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.567134 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" event={"ID":"0ab1562e-a39e-4ddf-95ee-cf6ff520883d","Type":"ContainerStarted","Data":"ba90a7c6a271a49e330355356e2dac4ae824c4b65204ffb03241dab5bf5bf230"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.575885 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" event={"ID":"6c791206-2b2a-4372-b1ba-dc98863e8dcd","Type":"ContainerStarted","Data":"2ac8181128a8d2ab598985ab84349aa9e5f678eaca86f4e0d2e7261cfbad9c75"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.584519 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-4bzgw" event={"ID":"b2767e5e-6a38-4668-95f6-f677e298c6f8","Type":"ContainerStarted","Data":"5852404ed0bf5e79d780d88e2abb537266c35dc3f1f738d852e90bb245606565"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.594062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" event={"ID":"05826cf2-7094-46a7-a08f-8f39f5fb3520","Type":"ContainerStarted","Data":"3d3fb0df8a00125b64baee886f9f35bf30c5f42a5e8c442204b7630b32084a0e"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.603728 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" event={"ID":"958cb75b-851b-4661-87e2-1eac8aa8e922","Type":"ContainerStarted","Data":"6b75d7b2ce43501a7dd35d99d68e7932d9bc33d28989e40fcc24989cee2114c5"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.607647 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" event={"ID":"fdf18ac4-14dd-4d17-9922-5db12fa07225","Type":"ContainerStarted","Data":"40a08ee448b0374cc881abaedf6bdab037e52f509cd9f8f17ecc1f68d4211eca"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.615715 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" event={"ID":"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c","Type":"ContainerStarted","Data":"0c4cf84f2d1e3479fa9dceee41a4bcad7609d72661ec51be48bae72dd8bacc66"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.624918 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" event={"ID":"29b802ef-670a-46bb-9ad3-03bddd7dc682","Type":"ContainerStarted","Data":"0bfb210ed5a26c042ab13a2be45b552c5c63cca89f2b16d9380f7e2f51bd54dc"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.627909 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" event={"ID":"3490980a-3437-48fe-9cad-aaf6cf84cc16","Type":"ContainerStarted","Data":"66ec7c034209a6bb5426ddd4b835beab608cc1b7a1f8924508c98d02fff5f249"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.627935 4779 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" event={"ID":"3490980a-3437-48fe-9cad-aaf6cf84cc16","Type":"ContainerStarted","Data":"d10042b177dea9afd86583169dcb08fdd2ff771b039aee223dd3287031392099"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.654956 4779 generic.go:334] "Generic (PLEG): container finished" podID="92115ba3-0309-4777-94c9-8a19dbdfc276" containerID="8b1fc8f88e3833816f439140e205e3dcd886e79a07a6bae118d5593c7b0923f9" exitCode=0 Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.657211 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" event={"ID":"92115ba3-0309-4777-94c9-8a19dbdfc276","Type":"ContainerDied","Data":"8b1fc8f88e3833816f439140e205e3dcd886e79a07a6bae118d5593c7b0923f9"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.660583 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.661453 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.661728 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.161718047 +0000 UTC m=+154.046143147 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.679472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-blwj9" event={"ID":"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7","Type":"ContainerStarted","Data":"ad85ce024ac44b7e65f9127de9249fdffb0d21e110c9b6e28c0d36d0dcbd1e0b"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.685389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" event={"ID":"73ef5985-06f9-448f-ab77-7768c6b5face","Type":"ContainerStarted","Data":"f8debf55d1650b51b5eb1f3b2edb2d02ef99256ab188ab7c300bc63945c5eba7"} Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.733250 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9tvpg"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.739043 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9"] Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.764478 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.765505 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.265483609 +0000 UTC m=+154.149908709 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: W0929 19:10:42.770720 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ed9d7da_9845_4c1a_8ec8_98f610a7cc9a.slice/crio-5a6472b7d5d60a9aab5e97607758595d7625f339234bcb9c343878ccbbd6bc73 WatchSource:0}: Error finding container 5a6472b7d5d60a9aab5e97607758595d7625f339234bcb9c343878ccbbd6bc73: Status 404 returned error can't find the container with id 5a6472b7d5d60a9aab5e97607758595d7625f339234bcb9c343878ccbbd6bc73 Sep 29 19:10:42 crc kubenswrapper[4779]: W0929 19:10:42.839751 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3f6a87f_c596_4bfd_9330_13b5b8fabfe4.slice/crio-5a15138e722e4135e9c4fbccdb0e3e28a3165c406a2c010adb71de853bc37105 WatchSource:0}: Error finding container 5a15138e722e4135e9c4fbccdb0e3e28a3165c406a2c010adb71de853bc37105: Status 404 returned error can't find the container with id 5a15138e722e4135e9c4fbccdb0e3e28a3165c406a2c010adb71de853bc37105 Sep 29 19:10:42 crc kubenswrapper[4779]: W0929 19:10:42.844005 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod336740f0_4088_499f_8090_b2c86ce4bf28.slice/crio-bff66fe3d5dd28455e4f15ccc33e95571221d19834fc09f9b77fde0fa4c16293 WatchSource:0}: Error finding container bff66fe3d5dd28455e4f15ccc33e95571221d19834fc09f9b77fde0fa4c16293: Status 404 returned error can't find the container with id bff66fe3d5dd28455e4f15ccc33e95571221d19834fc09f9b77fde0fa4c16293 Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.865817 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.871631 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.371595508 +0000 UTC m=+154.256020608 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.971505 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.971850 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.471824259 +0000 UTC m=+154.356249359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:42 crc kubenswrapper[4779]: I0929 19:10:42.971919 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:42 crc kubenswrapper[4779]: E0929 19:10:42.980802 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.48077162 +0000 UTC m=+154.365196720 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.072989 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.073412 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.57339861 +0000 UTC m=+154.457823710 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.176898 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.177173 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.677162972 +0000 UTC m=+154.561588072 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.278612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.278911 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.778896346 +0000 UTC m=+154.663321446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.348135 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.355178 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"] Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.362416 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72bf75a9_d335_40b3_ad88_bc72472d4256.slice/crio-f7f18e1765190cb3a6a99c302d33709183718e2aeec29db428ed821aaf4820ae WatchSource:0}: Error finding container f7f18e1765190cb3a6a99c302d33709183718e2aeec29db428ed821aaf4820ae: Status 404 returned error can't find the container with id f7f18e1765190cb3a6a99c302d33709183718e2aeec29db428ed821aaf4820ae Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.372037 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.379830 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.380184 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 19:10:43.880167953 +0000 UTC m=+154.764593053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.396535 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4vf95"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.420607 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.423848 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.427438 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k"] Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.431684 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc6c9bd_e7c8_44c0_b65d_0e335cc3b134.slice/crio-6c3eebc52666ca762fec5d63f36cafb97d4e555949610e58ffda2bc04cdb5a6c WatchSource:0}: Error finding container 6c3eebc52666ca762fec5d63f36cafb97d4e555949610e58ffda2bc04cdb5a6c: Status 404 returned error can't find the container with id 6c3eebc52666ca762fec5d63f36cafb97d4e555949610e58ffda2bc04cdb5a6c Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.469237 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9fff169_c8a1_4062_9a2a_ab4c1e790c07.slice/crio-286c58b248a495f8fdff5e27385724e70a16068a44e958a13b158754f60b9dcb WatchSource:0}: Error finding container 286c58b248a495f8fdff5e27385724e70a16068a44e958a13b158754f60b9dcb: Status 404 returned error can't find the container with id 286c58b248a495f8fdff5e27385724e70a16068a44e958a13b158754f60b9dcb Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.481923 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.482069 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.982041986 +0000 UTC m=+154.866467086 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.491185 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.492099 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:43.992082709 +0000 UTC m=+154.876507809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.500960 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jh4mm"] Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.537673 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ae0fa99_1f97_4233_953b_fd63f9a6a418.slice/crio-a2aac9f0f384011c472e5ff458ead71fb052f508051bc918f1aae6755afda1f7 WatchSource:0}: Error finding container a2aac9f0f384011c472e5ff458ead71fb052f508051bc918f1aae6755afda1f7: Status 404 returned error can't find the container with id a2aac9f0f384011c472e5ff458ead71fb052f508051bc918f1aae6755afda1f7 Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.568483 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-bh65l"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.571263 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.577857 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.591514 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hhcst"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.595004 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g"] Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.601618 4779 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.101601759 +0000 UTC m=+154.986026859 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.601648 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.602452 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.603055 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.103026378 +0000 UTC m=+154.987451478 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.698664 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" event={"ID":"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4","Type":"ContainerStarted","Data":"9c5151c5e8751228a2053e489d8a1e4a35444b7df2803a08dad07f90230c7438"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.698828 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" event={"ID":"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4","Type":"ContainerStarted","Data":"5a15138e722e4135e9c4fbccdb0e3e28a3165c406a2c010adb71de853bc37105"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.703584 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.703685 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.203666808 +0000 UTC m=+155.088091908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.703984 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.704390 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.204378977 +0000 UTC m=+155.088804077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.713970 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" event={"ID":"e9fff169-c8a1-4062-9a2a-ab4c1e790c07","Type":"ContainerStarted","Data":"286c58b248a495f8fdff5e27385724e70a16068a44e958a13b158754f60b9dcb"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.719103 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.727948 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vvp2h"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.729549 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8528j" podStartSLOduration=134.729535609 podStartE2EDuration="2m14.729535609s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:43.727492574 +0000 UTC m=+154.611917674" watchObservedRunningTime="2025-09-29 19:10:43.729535609 +0000 UTC m=+154.613960709" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.730695 4779 generic.go:334] "Generic (PLEG): container finished" podID="05826cf2-7094-46a7-a08f-8f39f5fb3520" containerID="665a908a808c116caf1aba2e0914faed42957daf31b06a730d409d16c590fa2c" exitCode=0 Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.730775 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" event={"ID":"05826cf2-7094-46a7-a08f-8f39f5fb3520","Type":"ContainerDied","Data":"665a908a808c116caf1aba2e0914faed42957daf31b06a730d409d16c590fa2c"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.746911 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.748599 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.756813 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" event={"ID":"9e558fcb-c770-4581-a192-0f396bab99c7","Type":"ContainerStarted","Data":"2270514c79c2ad4e3d7dc7881b2d031cdd5c9184328d50991da3a079f091deda"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.758284 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6g6c5"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.758809 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" 
event={"ID":"118f9ff6-7433-41ee-a6bd-84e14fd1ea98","Type":"ContainerStarted","Data":"2c6bd38f19f9186bb7f9c6f5784a497d59f5daa8aa7ea00826f916725bf4d4bb"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.758835 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" event={"ID":"118f9ff6-7433-41ee-a6bd-84e14fd1ea98","Type":"ContainerStarted","Data":"1c82568af2a6dbcecb9894b6c0cd8cc8733840cc345b709d552906016650a713"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.785484 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zkscm" podStartSLOduration=134.785470576 podStartE2EDuration="2m14.785470576s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:43.783553024 +0000 UTC m=+154.667978124" watchObservedRunningTime="2025-09-29 19:10:43.785470576 +0000 UTC m=+154.669895676" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.786122 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.786157 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.793658 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" event={"ID":"72bf75a9-d335-40b3-ad88-bc72472d4256","Type":"ContainerStarted","Data":"62d2bb431a1767dfb0fbc68b368cfa12f855cce0560566cb8eabe8ace9520300"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.793711 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" event={"ID":"72bf75a9-d335-40b3-ad88-bc72472d4256","Type":"ContainerStarted","Data":"f7f18e1765190cb3a6a99c302d33709183718e2aeec29db428ed821aaf4820ae"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.793728 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.793804 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb"] Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.800559 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod935082cd_101b_44e0_8315_186d166a1b2a.slice/crio-a68626c8d03a160b9cf77f43eaf71228a50e380f1c1d41068ce3ab8f2114c767 WatchSource:0}: Error finding container a68626c8d03a160b9cf77f43eaf71228a50e380f1c1d41068ce3ab8f2114c767: Status 404 returned error can't find the container with id a68626c8d03a160b9cf77f43eaf71228a50e380f1c1d41068ce3ab8f2114c767 Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.801301 
4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" event={"ID":"308f2878-9a95-4b6a-8d03-90c431c05c1f","Type":"ContainerStarted","Data":"83bb3f17c048ac68a709fd31e98e9593908ae4a824f899a45231ed181bf9201c"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.803583 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q75j6"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.804951 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.805111 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9"] Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.805306 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.305291044 +0000 UTC m=+155.189716144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.805431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.805698 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.305691725 +0000 UTC m=+155.190116825 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.809521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4vf95" event={"ID":"078d7a8c-aa40-42c3-b26e-30dd2a01cae0","Type":"ContainerStarted","Data":"620c255b1fcff01b72b3cdba5989bda2348a96449caa3d29eac1eb26cf5eed70"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.810406 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.814089 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" event={"ID":"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a","Type":"ContainerStarted","Data":"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.814480 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.814492 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" event={"ID":"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a","Type":"ContainerStarted","Data":"5a6472b7d5d60a9aab5e97607758595d7625f339234bcb9c343878ccbbd6bc73"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.817623 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" event={"ID":"0ab1562e-a39e-4ddf-95ee-cf6ff520883d","Type":"ContainerStarted","Data":"c8204eba8e309619d5c1357a4dccfde11b4cc48688e11069c5db4793680a477d"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.817656 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" event={"ID":"0ab1562e-a39e-4ddf-95ee-cf6ff520883d","Type":"ContainerStarted","Data":"f27938c6217d25f796b87e0eb262c20394a1e5fbf3ba2485e81852b123456a57"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.819665 4779 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qlvq5 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.819697 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.825761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" 
event={"ID":"6c791206-2b2a-4372-b1ba-dc98863e8dcd","Type":"ContainerStarted","Data":"3308ddea09413c5b8d3991f9ed841b61af908aef7ba6b373cb0349916691b350"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.830076 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" event={"ID":"92115ba3-0309-4777-94c9-8a19dbdfc276","Type":"ContainerStarted","Data":"6b11eddf12520469e7a88b2945266dc779794a7eef17fb6b336e07321306a20e"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.837966 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" event={"ID":"fdf18ac4-14dd-4d17-9922-5db12fa07225","Type":"ContainerStarted","Data":"6bd57ab9f413e6da779bf6dea5746ce66d91cfeae9e6ada743d8b256e0acf5a1"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.838005 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" event={"ID":"fdf18ac4-14dd-4d17-9922-5db12fa07225","Type":"ContainerStarted","Data":"00184be9f0deb359f4e28d9e21966f146b30bd47aca8e6555957be301d48d8be"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.839749 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" event={"ID":"3197e1c4-03d1-42f1-8bee-87a97962cf70","Type":"ContainerStarted","Data":"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.839804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" event={"ID":"3197e1c4-03d1-42f1-8bee-87a97962cf70","Type":"ContainerStarted","Data":"349290492a5f8bc336f70b7acb78d292b9164a86b02f61aab5619cf841d8a75c"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.841106 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" event={"ID":"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134","Type":"ContainerStarted","Data":"6c3eebc52666ca762fec5d63f36cafb97d4e555949610e58ffda2bc04cdb5a6c"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.845612 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" event={"ID":"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c","Type":"ContainerStarted","Data":"45720b0948818f47e8c8fd62230b7c1fd6fbe3ae5f55b48227d6f029462c227f"} Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.845715 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod014a4efc_c5f5_42c9_b90f_557a51659e67.slice/crio-bbc2fbd0075ae905adad7e4b2e63dca2d83f94f2af81c58fa0cb1da0a7f7bc1b WatchSource:0}: Error finding container bbc2fbd0075ae905adad7e4b2e63dca2d83f94f2af81c58fa0cb1da0a7f7bc1b: Status 404 returned error can't find the container with id bbc2fbd0075ae905adad7e4b2e63dca2d83f94f2af81c58fa0cb1da0a7f7bc1b Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.848281 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" event={"ID":"44f9f157-a816-4fd6-aff5-eccc66abb454","Type":"ContainerStarted","Data":"c527685b85b8beffc44f969696cea58e8d662d77e43b979ec01726e28f6dad71"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.849816 
4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" event={"ID":"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e","Type":"ContainerStarted","Data":"9fccf9ae097860af07f76a2699b7b305643752e55f7e5c371b1b49ba23e374fa"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.851202 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-blwj9" event={"ID":"0b00b35e-d4f2-4be2-9bd1-e9aa6c617de7","Type":"ContainerStarted","Data":"2687d57ba19de532ec7515e4497a21b2a32926aaff8335c4de8e4376cfd6041a"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.858763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" event={"ID":"958cb75b-851b-4661-87e2-1eac8aa8e922","Type":"ContainerStarted","Data":"febe6b7dfb0b3df8d9ada68031cb8284a221073e0e6e5436e4cfcfffdd73c2ab"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.860886 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" podStartSLOduration=134.860863351 podStartE2EDuration="2m14.860863351s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:43.859631188 +0000 UTC m=+154.744056298" watchObservedRunningTime="2025-09-29 19:10:43.860863351 +0000 UTC m=+154.745288461" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.878585 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hhcst" event={"ID":"52419e1c-e6c0-4225-95bf-da711c24e399","Type":"ContainerStarted","Data":"bfa5ff66d48704f02e9adaab4ff4ada4788ea0ca5db9085684e0c70f091481cd"} Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.884149 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b795657_72fc_4b72_9186_f0dc24678b36.slice/crio-5fea85a5d1fb2dc38252dab52b9bbf8d590ea5aef89f8f77f2192df7c54b2633 WatchSource:0}: Error finding container 5fea85a5d1fb2dc38252dab52b9bbf8d590ea5aef89f8f77f2192df7c54b2633: Status 404 returned error can't find the container with id 5fea85a5d1fb2dc38252dab52b9bbf8d590ea5aef89f8f77f2192df7c54b2633 Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.884516 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" event={"ID":"29b802ef-670a-46bb-9ad3-03bddd7dc682","Type":"ContainerStarted","Data":"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.884951 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.886719 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" event={"ID":"1ae0fa99-1f97-4233-953b-fd63f9a6a418","Type":"ContainerStarted","Data":"a2aac9f0f384011c472e5ff458ead71fb052f508051bc918f1aae6755afda1f7"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.887601 4779 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pjjch container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": dial 
tcp 10.217.0.14:6443: connect: connection refused" start-of-body= Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.887623 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.906116 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:43 crc kubenswrapper[4779]: E0929 19:10:43.906473 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.406432627 +0000 UTC m=+155.290857727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.915806 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" event={"ID":"336740f0-4088-499f-8090-b2c86ce4bf28","Type":"ContainerStarted","Data":"92f487fd453bb466bc3798fd3074b5304b1b6c2d4639c5c137e5532a5d0812b9"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.915876 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" event={"ID":"336740f0-4088-499f-8090-b2c86ce4bf28","Type":"ContainerStarted","Data":"bff66fe3d5dd28455e4f15ccc33e95571221d19834fc09f9b77fde0fa4c16293"} Sep 29 19:10:43 crc kubenswrapper[4779]: W0929 19:10:43.916813 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1209880f_ab8c_4964_8796_81bc1fdf803b.slice/crio-ce352b1aa9c64ca73cb967bdde345a8192a259c657889f1c2d1d9f4255786e8e WatchSource:0}: Error finding container ce352b1aa9c64ca73cb967bdde345a8192a259c657889f1c2d1d9f4255786e8e: Status 404 returned error can't find the container with id ce352b1aa9c64ca73cb967bdde345a8192a259c657889f1c2d1d9f4255786e8e Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.921243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" event={"ID":"25ae8232-dd23-4bbd-bae1-76ab7f9ce10c","Type":"ContainerStarted","Data":"9bf239e3a5fc599385467acabd41f1f30ec40510b2caae5d2ce084d84d7e3919"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.925206 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-4bzgw" 
event={"ID":"b2767e5e-6a38-4668-95f6-f677e298c6f8","Type":"ContainerStarted","Data":"504962f7bed696cea513726e027544a119bb639e609751000c8093c072aa8c89"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.926652 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" event={"ID":"8f956c33-ffd9-4517-b8d2-febf5b12f2d0","Type":"ContainerStarted","Data":"20e7fa484f4a091f9427e7d96b54d3814b3382a4e79da76d107d30546fdc046e"} Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.950771 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn"] Sep 29 19:10:43 crc kubenswrapper[4779]: I0929 19:10:43.970656 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b57sr"] Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.010406 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.012804 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.512790302 +0000 UTC m=+155.397215402 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.111174 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.112187 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.612168978 +0000 UTC m=+155.496594068 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.213851 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.214163 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.714153424 +0000 UTC m=+155.598578524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.316109 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.316360 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.816307804 +0000 UTC m=+155.700732914 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.316613 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.317015 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.817001013 +0000 UTC m=+155.701426113 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.418708 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.419249 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:44.919233006 +0000 UTC m=+155.803658106 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.513163 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-66qtp" podStartSLOduration=134.513147683 podStartE2EDuration="2m14.513147683s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:44.511291513 +0000 UTC m=+155.395716623" watchObservedRunningTime="2025-09-29 19:10:44.513147683 +0000 UTC m=+155.397572783" Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.520880 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.521195 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.021181931 +0000 UTC m=+155.905607031 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.623252 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.623882 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.123865506 +0000 UTC m=+156.008290606 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.706115 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.714655 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:44 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:44 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:44 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.714703 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.724567 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.724977 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.224961198 +0000 UTC m=+156.109386298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.826098 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.826226 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.326202114 +0000 UTC m=+156.210627214 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.826664 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.826979 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.326971665 +0000 UTC m=+156.211396765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.885374 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-4bzgw" podStartSLOduration=135.885357099 podStartE2EDuration="2m15.885357099s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:44.883643082 +0000 UTC m=+155.768068182" watchObservedRunningTime="2025-09-29 19:10:44.885357099 +0000 UTC m=+155.769782199" Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.927171 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:44 crc kubenswrapper[4779]: E0929 19:10:44.927578 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.427561684 +0000 UTC m=+156.311986784 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.940062 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" podStartSLOduration=134.940043202 podStartE2EDuration="2m14.940043202s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:44.939055165 +0000 UTC m=+155.823480255" watchObservedRunningTime="2025-09-29 19:10:44.940043202 +0000 UTC m=+155.824468302"
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.949973 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" event={"ID":"118f9ff6-7433-41ee-a6bd-84e14fd1ea98","Type":"ContainerStarted","Data":"cbc304b0d8b472f9d6059576e5907ebb6dc5547fd4eb4ffbe1ecae4267a2e608"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.966161 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" event={"ID":"9e558fcb-c770-4581-a192-0f396bab99c7","Type":"ContainerStarted","Data":"20d9e0c2d4d72b29c64827cdfd667b4478462d36a46dc44292fc06fab5dbefe5"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.969457 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4vf95" event={"ID":"078d7a8c-aa40-42c3-b26e-30dd2a01cae0","Type":"ContainerStarted","Data":"e76a063e7bd431e290025e2bf1d963bc84e6faf5cfcac56f8dbc53efd0168bf0"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.970578 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4vf95"
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.978911 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.979123 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused"
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.990442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" event={"ID":"9b795657-72fc-4b72-9186-f0dc24678b36","Type":"ContainerStarted","Data":"eaf0a13e29f0dedcc46dfbfed10ab0b19f76f62448fa2a4df4319f98e45dbb11"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.990482 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" event={"ID":"9b795657-72fc-4b72-9186-f0dc24678b36","Type":"ContainerStarted","Data":"5fea85a5d1fb2dc38252dab52b9bbf8d590ea5aef89f8f77f2192df7c54b2633"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.991571 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" podStartSLOduration=135.991551999 podStartE2EDuration="2m15.991551999s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:44.990939873 +0000 UTC m=+155.875364973" watchObservedRunningTime="2025-09-29 19:10:44.991551999 +0000 UTC m=+155.875977109"
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.993062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vvp2h" event={"ID":"d6ea9901-9f72-4641-b427-15ae8e7a5bfa","Type":"ContainerStarted","Data":"9faffffd1c87a4eff89c432e0e88f52729adbd3476c08fe7d30b74aa99b6f41b"}
Sep 29 19:10:44 crc kubenswrapper[4779]: I0929 19:10:44.993090 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vvp2h" event={"ID":"d6ea9901-9f72-4641-b427-15ae8e7a5bfa","Type":"ContainerStarted","Data":"f81c9771f5b4ac885e438e8059a118962687f0e0f55e19259e10dbbeee1ce463"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.007628 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" event={"ID":"2d999e79-a467-4f19-a67a-f5993c6b4423","Type":"ContainerStarted","Data":"cd53d2289ffddfa4e5198db41eeb503c449f0e6870cff978d2687dd64c7d6c42"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.007672 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" event={"ID":"2d999e79-a467-4f19-a67a-f5993c6b4423","Type":"ContainerStarted","Data":"3249fc9a4245d55f2c8e73ac8765ebd3648841699829bd995a27626f1856ff9c"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.014013 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" event={"ID":"e9fff169-c8a1-4062-9a2a-ab4c1e790c07","Type":"ContainerStarted","Data":"32e2803e3419b749b788e765166ac25b10dea7693b34246ef4d6255d12bdc64c"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.018440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" event={"ID":"dc132003-e19f-4bdc-b77f-69dbe408b68f","Type":"ContainerStarted","Data":"bdf3f03d70a1bc7b610f73b964fab1e4b93e2d30f0435f4d581c6fe9f9ee6d83"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.030148 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.031290 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.531274417 +0000 UTC m=+156.415699517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.032553 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-blwj9" podStartSLOduration=7.032540981 podStartE2EDuration="7.032540981s" podCreationTimestamp="2025-09-29 19:10:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.029891779 +0000 UTC m=+155.914316879" watchObservedRunningTime="2025-09-29 19:10:45.032540981 +0000 UTC m=+155.916966081"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.033838 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" event={"ID":"72bf75a9-d335-40b3-ad88-bc72472d4256","Type":"ContainerStarted","Data":"bff5bf607d9841981c895db8c7091725e97fe64b99cacb0d3bf2640c0bc91709"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.034450 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.059638 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" podStartSLOduration=135.059620315 podStartE2EDuration="2m15.059620315s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.05943757 +0000 UTC m=+155.943862670" watchObservedRunningTime="2025-09-29 19:10:45.059620315 +0000 UTC m=+155.944045415"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.069551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" event={"ID":"1ae0fa99-1f97-4233-953b-fd63f9a6a418","Type":"ContainerStarted","Data":"71a0c2f4bfb3862e85950857ff067b1a77c7f207210e733757a16ad75bb1ffee"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.070200 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-jh4mm"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.072074 4779 patch_prober.go:28] interesting pod/console-operator-58897d9998-jh4mm container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body=
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.072115 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" podUID="1ae0fa99-1f97-4233-953b-fd63f9a6a418" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused"
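The repeated "connect: connection refused" readiness failures above are the kubelet polling container endpoints that have not started listening yet. A minimal sketch of such an HTTP readiness check (the endpoint URL and timeout are illustrative, not the kubelet's actual prober configuration):

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeReadiness performs one HTTP GET against a pod endpoint, the way a
// readiness probe does: any connection error or non-2xx status is a failure.
func probeReadiness(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// While the container is still starting this typically surfaces as
		// "dial tcp ...: connect: connection refused", as in the log above.
		return fmt.Errorf("probe failed: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Illustrative endpoint modeled on the downloads pod probe in the log.
	if err := probeReadiness("http://10.217.0.31:8080/"); err != nil {
		fmt.Println(err)
	}
}
```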
\"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.079532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" event={"ID":"014a4efc-c5f5-42c9-b90f-557a51659e67","Type":"ContainerStarted","Data":"bbc2fbd0075ae905adad7e4b2e63dca2d83f94f2af81c58fa0cb1da0a7f7bc1b"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.120970 4779 generic.go:334] "Generic (PLEG): container finished" podID="308f2878-9a95-4b6a-8d03-90c431c05c1f" containerID="8099f76399436fd75a30d15ab8d17d27f9ab0085f5921a38ae11da81c91ddfe5" exitCode=0 Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.121039 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" event={"ID":"308f2878-9a95-4b6a-8d03-90c431c05c1f","Type":"ContainerDied","Data":"8099f76399436fd75a30d15ab8d17d27f9ab0085f5921a38ae11da81c91ddfe5"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.131296 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" event={"ID":"336740f0-4088-499f-8090-b2c86ce4bf28","Type":"ContainerStarted","Data":"0c932fbc88e1f80403245c632536d9ae819468a666ad7de57c6c3974c4d2b66e"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.132185 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.132493 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-nrx92" podStartSLOduration=135.132474061 podStartE2EDuration="2m15.132474061s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.132135222 +0000 UTC m=+156.016560342" watchObservedRunningTime="2025-09-29 19:10:45.132474061 +0000 UTC m=+156.016899161" Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.133289 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.633275823 +0000 UTC m=+156.517700923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.133723 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n7hnm" podStartSLOduration=136.133713325 podStartE2EDuration="2m16.133713325s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.089060274 +0000 UTC m=+155.973485374" watchObservedRunningTime="2025-09-29 19:10:45.133713325 +0000 UTC m=+156.018138425" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.152830 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" event={"ID":"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b","Type":"ContainerStarted","Data":"188f7b6c4a220254a5a07b24eb3f36ecc3962f9adbd3a936facb31742556a932"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.153622 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.164483 4779 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wsbvn container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.164548 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" podUID="7a54027e-55dc-42f9-aa21-1c82ec4d1b4b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.199737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" event={"ID":"0bf68edf-b009-46c6-bdc0-05c48967d5d6","Type":"ContainerStarted","Data":"c7afa054bd4d3ff12faf96004701c9ce936a9794283be860fae503da44ba2722"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.199784 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" event={"ID":"0bf68edf-b009-46c6-bdc0-05c48967d5d6","Type":"ContainerStarted","Data":"5c68e4991c00bd11a03775be707f06331769cf185ad11b41c85826844a5679fe"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.220021 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" podStartSLOduration=136.219995035 podStartE2EDuration="2m16.219995035s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 
19:10:45.180616117 +0000 UTC m=+156.065041217" watchObservedRunningTime="2025-09-29 19:10:45.219995035 +0000 UTC m=+156.104420135" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.235668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.237655 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.73748583 +0000 UTC m=+156.621910930 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.263878 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-85d4c" event={"ID":"7dc6c9bd-e7c8-44c0-b65d-0e335cc3b134","Type":"ContainerStarted","Data":"31b168d0438c984ef1ce0d26c8775dfa402414cf51e37dc6e6b90c51e06a98fe"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.272906 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-z2jm2" podStartSLOduration=136.27289105 podStartE2EDuration="2m16.27289105s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.221617239 +0000 UTC m=+156.106042359" watchObservedRunningTime="2025-09-29 19:10:45.27289105 +0000 UTC m=+156.157316150" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.279665 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-d5hk2" podStartSLOduration=136.279647013 podStartE2EDuration="2m16.279647013s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.271966335 +0000 UTC m=+156.156391435" watchObservedRunningTime="2025-09-29 19:10:45.279647013 +0000 UTC m=+156.164072113" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.312101 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" event={"ID":"fd9b8ba1-513d-4c37-aba0-ce6856f5f55c","Type":"ContainerStarted","Data":"f7baf83eadc796dae63976bec2cbe30e1b7a94e4527172b561346f5cc7c9c91c"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.314915 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fcw8k" 
podStartSLOduration=136.314898529 podStartE2EDuration="2m16.314898529s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.313169963 +0000 UTC m=+156.197595063" watchObservedRunningTime="2025-09-29 19:10:45.314898529 +0000 UTC m=+156.199323629" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.335937 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" event={"ID":"8f956c33-ffd9-4517-b8d2-febf5b12f2d0","Type":"ContainerStarted","Data":"fae8ab694823c77e301b70b14a6de4c86286b0812f0c32844500e4c9ad20cd7f"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.336692 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.336877 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.836852235 +0000 UTC m=+156.721277325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.337006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.337468 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.837453481 +0000 UTC m=+156.721878581 (durationBeforeRetry 500ms). 
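Every Mount/Unmount retry above fails the same way: the kubelet cannot build a CSI client because the kubevirt.io.hostpath-provisioner node plugin has not yet registered over the kubelet's plugin-registration socket. One way to confirm registration is to inspect the node's CSINode object, which lists the drivers registered on that node. A hedged client-go sketch (in-cluster config and the node name "crc" are assumptions for illustration):

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes running inside the cluster
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A CSINode object enumerates the CSI drivers whose node plugins have
	// registered with the kubelet on that node ("crc" is this log's node).
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println("registered CSI driver:", d.Name)
	}
}
```

Until kubevirt.io.hostpath-provisioner appears in that list, the reconciler's mount attempts for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 keep failing and being requeued, as the entries below show.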
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.341675 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" event={"ID":"44f9f157-a816-4fd6-aff5-eccc66abb454","Type":"ContainerStarted","Data":"744a348d2c0ee781829a03392076473f502223326c80ecfb8a5767c397a81fbe"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.343773 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.367702 4779 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-kdxj8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body=
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.367756 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" podUID="44f9f157-a816-4fd6-aff5-eccc66abb454" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.370521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" event={"ID":"b3f6a87f-c596-4bfd-9330-13b5b8fabfe4","Type":"ContainerStarted","Data":"0638b424014bcd99324dd6e6ebd1f6ad4cda15641e3cd04a62e065b41985d425"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.377418 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" event={"ID":"1209880f-ab8c-4964-8796-81bc1fdf803b","Type":"ContainerStarted","Data":"ce352b1aa9c64ca73cb967bdde345a8192a259c657889f1c2d1d9f4255786e8e"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.385827 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" podStartSLOduration=136.385811183 podStartE2EDuration="2m16.385811183s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.385624618 +0000 UTC m=+156.270049718" watchObservedRunningTime="2025-09-29 19:10:45.385811183 +0000 UTC m=+156.270236283"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.387270 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4vf95" podStartSLOduration=136.387264642 podStartE2EDuration="2m16.387264642s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.34073331 +0000 UTC m=+156.225158420" watchObservedRunningTime="2025-09-29 19:10:45.387264642 +0000 UTC m=+156.271689742"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.417519 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" event={"ID":"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e","Type":"ContainerStarted","Data":"149337cc4c25d47ca0668834e7e6ec91e5e106613863c5b03a3e0a8b2f2d7ed3"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.418466 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.422303 4779 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bt79h container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.422372 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.426334 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hhcst" event={"ID":"52419e1c-e6c0-4225-95bf-da711c24e399","Type":"ContainerStarted","Data":"1d0d0b98a8f7584b56aba7dd5a3a46d92044e49c9cbd883378512f02b6fe215d"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.437489 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.437662 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.937637538 +0000 UTC m=+156.822062638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.437803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.439071 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:45.939055677 +0000 UTC m=+156.823480777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.455407 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" podStartSLOduration=135.45538809 podStartE2EDuration="2m15.45538809s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.417184464 +0000 UTC m=+156.301609564" watchObservedRunningTime="2025-09-29 19:10:45.45538809 +0000 UTC m=+156.339813190"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.457754 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" podStartSLOduration=135.457746244 podStartE2EDuration="2m15.457746244s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.456612773 +0000 UTC m=+156.341037873" watchObservedRunningTime="2025-09-29 19:10:45.457746244 +0000 UTC m=+156.342171344"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.462252 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" event={"ID":"05826cf2-7094-46a7-a08f-8f39f5fb3520","Type":"ContainerStarted","Data":"ee9185266ac8fa9f211dbc5eb1af0d323fbf8273a3db67ea2608019bbcc01221"}
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.480440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" event={"ID":"01f30d6b-1381-4cd8-9a61-ed94d536d2a2","Type":"ContainerStarted","Data":"b20f791835a8cb03c48253fed674b722fc28e788fa07e3105cae3808a10007bc"}
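The pod_startup_latency_tracker entries interleaved through this stream record podStartSLOduration, which in these lines matches watchObservedRunningTime minus podCreationTimestamp (e.g. 19:10:44.940043202 − 19:08:30 = 134.940043202s for the oauth-apiserver pod). A small sketch of that arithmetic, with timestamps copied from the log:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching Go's default time.String() form used in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

	// Timestamps copied from the apiserver-7bbb656c7d-lx98g entry above.
	created, _ := time.Parse(layout, "2025-09-29 19:08:30 +0000 UTC")
	observed, _ := time.Parse(layout, "2025-09-29 19:10:44.940043202 +0000 UTC")

	slo := observed.Sub(created)
	fmt.Println(slo.Seconds()) // 134.940043202  -> podStartSLOduration
	fmt.Println(slo)           // 2m14.940043202s -> podStartE2EDuration
}
```

The zero-valued firstStartedPulling/lastFinishedPulling fields ("0001-01-01 00:00:00 +0000 UTC") indicate no image pull was observed for these pods, which is plausible here since the images were already present on the node.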
event={"ID":"01f30d6b-1381-4cd8-9a61-ed94d536d2a2","Type":"ContainerStarted","Data":"b20f791835a8cb03c48253fed674b722fc28e788fa07e3105cae3808a10007bc"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.480494 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" event={"ID":"01f30d6b-1381-4cd8-9a61-ed94d536d2a2","Type":"ContainerStarted","Data":"8c0eabd870b0c325db567bf6bc4b57dc0323467e91833df392d684e0facbe328"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.485940 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" event={"ID":"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8","Type":"ContainerStarted","Data":"0c319551f8f25d6feae0566cad3c1190ff4ce766f2ec5c40e145bd3e3ccb8794"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.486150 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" event={"ID":"e5039b4e-b9b6-4d60-b5b6-c5d5c6765ac8","Type":"ContainerStarted","Data":"ddac9563a81b5295dd9b38433c58b362bc25f7331e9d2a134d91c2e6c1b28749"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.502991 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" event={"ID":"935082cd-101b-44e0-8315-186d166a1b2a","Type":"ContainerStarted","Data":"cd8868efaba823178952853190c008edeaa7e6f88f31efddf4c81266b8a0255e"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.503047 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" event={"ID":"935082cd-101b-44e0-8315-186d166a1b2a","Type":"ContainerStarted","Data":"a68626c8d03a160b9cf77f43eaf71228a50e380f1c1d41068ce3ab8f2114c767"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.503525 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.505292 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" podStartSLOduration=136.505274673 podStartE2EDuration="2m16.505274673s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.504330717 +0000 UTC m=+156.388755817" watchObservedRunningTime="2025-09-29 19:10:45.505274673 +0000 UTC m=+156.389699773" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.509784 4779 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-6js75 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.509967 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" podUID="935082cd-101b-44e0-8315-186d166a1b2a" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.511273 4779 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-dns/dns-default-q75j6" event={"ID":"a64632ed-ba47-4cfb-96a9-349b32995c3a","Type":"ContainerStarted","Data":"f8e66a272a08b712b54c036208507b46daa1bf1baa7fade0dff46eae9111c4a7"} Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.514353 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.515270 4779 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qlvq5 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.515336 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.538984 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.540383 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.540883 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.040869358 +0000 UTC m=+156.925294458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.581363 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-9tvpg" podStartSLOduration=136.581344856 podStartE2EDuration="2m16.581344856s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.540797646 +0000 UTC m=+156.425222746" watchObservedRunningTime="2025-09-29 19:10:45.581344856 +0000 UTC m=+156.465769956" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.581784 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-ksgbj" podStartSLOduration=136.581779158 podStartE2EDuration="2m16.581779158s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.57780962 +0000 UTC m=+156.462234720" watchObservedRunningTime="2025-09-29 19:10:45.581779158 +0000 UTC m=+156.466204248" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.642530 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.644693 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.144671034 +0000 UTC m=+157.029096134 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.686405 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s7xfz" podStartSLOduration=135.686392206 podStartE2EDuration="2m15.686392206s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.685678026 +0000 UTC m=+156.570103126" watchObservedRunningTime="2025-09-29 19:10:45.686392206 +0000 UTC m=+156.570817306" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.688768 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vvp2h" podStartSLOduration=6.68875929 podStartE2EDuration="6.68875929s" podCreationTimestamp="2025-09-29 19:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.656482234 +0000 UTC m=+156.540907334" watchObservedRunningTime="2025-09-29 19:10:45.68875929 +0000 UTC m=+156.573184390" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.712505 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:45 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:45 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:45 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.712576 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.734031 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-bh65l" podStartSLOduration=136.734013637 podStartE2EDuration="2m16.734013637s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.731717535 +0000 UTC m=+156.616142665" watchObservedRunningTime="2025-09-29 19:10:45.734013637 +0000 UTC m=+156.618438737" Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.745974 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.746234 4779 
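The router's startup probe output above uses the Kubernetes healthz multi-check convention: each named sub-check prints a "[+]" or "[-]" line, and any failure makes the aggregate endpoint return HTTP 500 followed by "healthz check failed". A minimal hand-rolled sketch of that aggregation (the check names mirror the log; the real router wires these checks up differently):

```go
package main

import (
	"fmt"
	"net/http"
)

// check is one named health condition; ok=false fails the aggregate probe.
type check struct {
	name string
	ok   bool
}

// healthz renders sub-check results in the "[+]name ok" / "[-]name failed"
// style and returns 500 if any sub-check failed, as the router probe shows.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		failed := false
		body := ""
		for _, c := range checks {
			if c.ok {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			} else {
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
				failed = true
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // probe sees statuscode: 500
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	// Sub-check names taken from the router-default log lines above.
	http.Handle("/healthz", healthz([]check{
		{"backend-http", false},
		{"has-synced", false},
		{"process-running", true},
	}))
	http.ListenAndServe(":8080", nil)
}
```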
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.846958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.847358 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.347343761 +0000 UTC m=+157.231768861 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.867948 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" podStartSLOduration=135.867930949 podStartE2EDuration="2m15.867930949s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.867679233 +0000 UTC m=+156.752104323" watchObservedRunningTime="2025-09-29 19:10:45.867930949 +0000 UTC m=+156.752356049"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.928633 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.947910 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.948039 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.448019622 +0000 UTC m=+157.332444722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.948271 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:45 crc kubenswrapper[4779]: E0929 19:10:45.948735 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.448716081 +0000 UTC m=+157.333141181 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.957663 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" podStartSLOduration=135.957647423 podStartE2EDuration="2m15.957647423s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.95678694 +0000 UTC m=+156.841212070" watchObservedRunningTime="2025-09-29 19:10:45.957647423 +0000 UTC m=+156.842072523"
Sep 29 19:10:45 crc kubenswrapper[4779]: I0929 19:10:45.960758 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2n6g9" podStartSLOduration=136.960740947 podStartE2EDuration="2m16.960740947s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.903094313 +0000 UTC m=+156.787519413" watchObservedRunningTime="2025-09-29 19:10:45.960740947 +0000 UTC m=+156.845166047"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.005242 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6g6c5" podStartSLOduration=136.005208123 podStartE2EDuration="2m16.005208123s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:45.99623556 +0000 UTC m=+156.880660660" watchObservedRunningTime="2025-09-29 19:10:46.005208123 +0000 UTC m=+156.889633223"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.028921 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" podStartSLOduration=137.028908366 podStartE2EDuration="2m17.028908366s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.027551089 +0000 UTC m=+156.911976189" watchObservedRunningTime="2025-09-29 19:10:46.028908366 +0000 UTC m=+156.913333456"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.049219 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.049431 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.549406182 +0000 UTC m=+157.433831282 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.049528 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.049838 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.549826373 +0000 UTC m=+157.434251473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.057840 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" podStartSLOduration=136.0578256 podStartE2EDuration="2m16.0578256s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.057217764 +0000 UTC m=+156.941642864" watchObservedRunningTime="2025-09-29 19:10:46.0578256 +0000 UTC m=+156.942250700"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.082792 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" podStartSLOduration=136.082778107 podStartE2EDuration="2m16.082778107s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.08177724 +0000 UTC m=+156.966202340" watchObservedRunningTime="2025-09-29 19:10:46.082778107 +0000 UTC m=+156.967203207"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.105400 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" podStartSLOduration=136.10538518 podStartE2EDuration="2m16.10538518s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.102713268 +0000 UTC m=+156.987138368" watchObservedRunningTime="2025-09-29 19:10:46.10538518 +0000 UTC m=+156.989810280"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.143536 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" podStartSLOduration=137.143521204 podStartE2EDuration="2m17.143521204s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.141541351 +0000 UTC m=+157.025966461" watchObservedRunningTime="2025-09-29 19:10:46.143521204 +0000 UTC m=+157.027946304"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.150306 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.150462 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.650431692 +0000 UTC m=+157.534856792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
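Each failed volume operation above is requeued with durationBeforeRetry 500ms, the floor of the kubelet's retry backoff for pending volume operations. A sketch of an exponential delay with a floor and cap in that spirit (the 500ms floor is visible in the log; the doubling factor and cap below are illustrative assumptions, not measured from this run, where every visible retry still shows the floor value):

```go
package main

import (
	"fmt"
	"time"
)

// nextBackoff sketches an exponential retry delay with a floor and cap,
// in the spirit of the durationBeforeRetry values in the log. Only the
// 500ms floor is taken from the log; factor and cap are assumptions.
func nextBackoff(prev time.Duration) time.Duration {
	const (
		floor    = 500 * time.Millisecond
		maxDelay = 2*time.Minute + 2*time.Second
	)
	if prev < floor {
		return floor
	}
	next := prev * 2
	if next > maxDelay {
		return maxDelay
	}
	return next
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 5; i++ {
		d = nextBackoff(d)
		fmt.Println("retry in", d) // 500ms, 1s, 2s, 4s, 8s
	}
}
```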
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.150582 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.150861 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.650849973 +0000 UTC m=+157.535275073 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.169871 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-px8c9" podStartSLOduration=136.169856189 podStartE2EDuration="2m16.169856189s" podCreationTimestamp="2025-09-29 19:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.168524053 +0000 UTC m=+157.052949153" watchObservedRunningTime="2025-09-29 19:10:46.169856189 +0000 UTC m=+157.054281289"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.251652 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.251852 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.751819172 +0000 UTC m=+157.636244272 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.252181 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.252559 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.752550072 +0000 UTC m=+157.636975172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.263733 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-bc7qj" podStartSLOduration=137.263718835 podStartE2EDuration="2m17.263718835s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.262492601 +0000 UTC m=+157.146917701" watchObservedRunningTime="2025-09-29 19:10:46.263718835 +0000 UTC m=+157.148143935"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.264506 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-hhcst" podStartSLOduration=137.264499966 podStartE2EDuration="2m17.264499966s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.23883814 +0000 UTC m=+157.123263230" watchObservedRunningTime="2025-09-29 19:10:46.264499966 +0000 UTC m=+157.148925056"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.353796 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.354138 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.854123687 +0000 UTC m=+157.738548777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.356358 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.356565 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.370071 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.454735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.455087 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:46.955071825 +0000 UTC m=+157.839496925 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.516738 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" event={"ID":"dc132003-e19f-4bdc-b77f-69dbe408b68f","Type":"ContainerStarted","Data":"3020b2e4b98a0b9d8f93beebaa0e7de4761feb4f0ecdd60d4921b8cfc4cbf624"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.518201 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" event={"ID":"308f2878-9a95-4b6a-8d03-90c431c05c1f","Type":"ContainerStarted","Data":"6b3c99ab79cb46638ca5bd6795c631f055ecbe98f3c1e0297311824a4363eddc"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.519034 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.520464 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" event={"ID":"1209880f-ab8c-4964-8796-81bc1fdf803b","Type":"ContainerStarted","Data":"ebaff9e6dc1e039184de11ff2ee812f59b20a56f0c272903848165f4d7d93999"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.520485 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cbd6x" event={"ID":"1209880f-ab8c-4964-8796-81bc1fdf803b","Type":"ContainerStarted","Data":"b8b6ea0f08eb0318ea943ff52fb69d078683d52f4a69df529ebdb4cc2fb6e027"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.522199 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q75j6" event={"ID":"a64632ed-ba47-4cfb-96a9-349b32995c3a","Type":"ContainerStarted","Data":"368691b67b1d23ccf97663e342fa6db25d47ffe914c972951ca421e5ed09d578"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.522222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q75j6" event={"ID":"a64632ed-ba47-4cfb-96a9-349b32995c3a","Type":"ContainerStarted","Data":"a3f6b018af224ecdd9236fcc850b6f5dc4aa3f26526b5288eb5a600f632a4a17"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.522544 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-q75j6"
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.524395 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" event={"ID":"7a54027e-55dc-42f9-aa21-1c82ec4d1b4b","Type":"ContainerStarted","Data":"0eb32f0f5263546ccd433ff07a3b16765da937fcd0acbb01e91151be15b838b8"}
Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.526458 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rrmfq" event={"ID":"014a4efc-c5f5-42c9-b90f-557a51659e67","Type":"ContainerStarted","Data":"f4cfeb45e03ee21726d0ffbebc5ea0688607033be0ab4b70a2f73b4f2201c9ec"}
Sep 29 19:10:46 crc
kubenswrapper[4779]: I0929 19:10:46.528649 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" event={"ID":"9b795657-72fc-4b72-9186-f0dc24678b36","Type":"ContainerStarted","Data":"6d5c3ff07a3515ddf6085307ff36ba99b5082696e99a4700c76c2fd6651fed1b"} Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.532948 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" event={"ID":"05826cf2-7094-46a7-a08f-8f39f5fb3520","Type":"ContainerStarted","Data":"917e8eb47592987ff5a2ad2a18d62511348e67e02332c437e37e2dabe15548ec"} Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.536171 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hr49g" event={"ID":"8f956c33-ffd9-4517-b8d2-febf5b12f2d0","Type":"ContainerStarted","Data":"ef02dba78e5102db2cedd2166329d3f86e4e5e518f349b31ac40ca7251b53347"} Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.537814 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.537849 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.539213 4779 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bt79h container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.539257 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.546500 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kdxj8" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.550875 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lx98g" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.552896 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6js75" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.555920 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.556304 4779 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.05629007 +0000 UTC m=+157.940715160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.558271 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb" podStartSLOduration=137.558258954 podStartE2EDuration="2m17.558258954s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.557292147 +0000 UTC m=+157.441717237" watchObservedRunningTime="2025-09-29 19:10:46.558258954 +0000 UTC m=+157.442684054" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.629941 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-q75j6" podStartSLOduration=7.629907747 podStartE2EDuration="7.629907747s" podCreationTimestamp="2025-09-29 19:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.629619149 +0000 UTC m=+157.514044249" watchObservedRunningTime="2025-09-29 19:10:46.629907747 +0000 UTC m=+157.514332847" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.630594 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fpfpb" podStartSLOduration=137.630588375 podStartE2EDuration="2m17.630588375s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:46.593584572 +0000 UTC m=+157.478009672" watchObservedRunningTime="2025-09-29 19:10:46.630588375 +0000 UTC m=+157.515013475" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.638712 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.638878 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.639815 4779 patch_prober.go:28] interesting pod/apiserver-76f77b778f-nxttj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.12:8443/livez\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.639862 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-nxttj" podUID="05826cf2-7094-46a7-a08f-8f39f5fb3520" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.12:8443/livez\": dial 
tcp 10.217.0.12:8443: connect: connection refused" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.666894 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.675129 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.175111313 +0000 UTC m=+158.059536413 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.716803 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:46 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:46 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:46 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.716896 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.774332 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.774663 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.274648263 +0000 UTC m=+158.159073363 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.875760 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.876051 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.376040382 +0000 UTC m=+158.260465482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.976465 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.976646 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.47661897 +0000 UTC m=+158.361044070 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:46 crc kubenswrapper[4779]: I0929 19:10:46.976696 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:46 crc kubenswrapper[4779]: E0929 19:10:46.976984 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.476971829 +0000 UTC m=+158.361396929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.077903 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.078082 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.578057171 +0000 UTC m=+158.462482271 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.089024 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-jh4mm" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.179737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.180141 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.68012489 +0000 UTC m=+158.564549980 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.280904 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.281092 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.781063517 +0000 UTC m=+158.665488617 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.281239 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.281607 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.781596422 +0000 UTC m=+158.666021522 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.381879 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.382065 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.882040426 +0000 UTC m=+158.766465526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.382112 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.382436 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.882423267 +0000 UTC m=+158.766848367 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.483620 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.483801 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.983776316 +0000 UTC m=+158.868201406 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.484006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.484289 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:47.984282199 +0000 UTC m=+158.868707299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.525695 4779 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wsbvn container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.525774 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" podUID="7a54027e-55dc-42f9-aa21-1c82ec4d1b4b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.544565 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" event={"ID":"dc132003-e19f-4bdc-b77f-69dbe408b68f","Type":"ContainerStarted","Data":"79b33b09a72a461147c8c5efc6114d100e373678bdcd36c9a381e502388fa677"} Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.545131 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.545178 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 
10.217.0.31:8080: connect: connection refused" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.545441 4779 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bt79h container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.545479 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.585351 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.585559 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.085534896 +0000 UTC m=+158.969959986 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.585673 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.586084 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.08607183 +0000 UTC m=+158.970496920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.686829 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.687441 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.187411479 +0000 UTC m=+159.071836579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.687635 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.688619 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.188600901 +0000 UTC m=+159.073025991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.713285 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:47 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:47 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:47 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.713562 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.788943 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.789252 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.289237591 +0000 UTC m=+159.173662691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.843626 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wsbvn" Sep 29 19:10:47 crc kubenswrapper[4779]: I0929 19:10:47.899297 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:47 crc kubenswrapper[4779]: E0929 19:10:47.899688 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.399675326 +0000 UTC m=+159.284100426 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.003606 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.004089 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.504054958 +0000 UTC m=+159.388480088 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.014298 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.015176 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.024023 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.063465 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.104929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.104995 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.105032 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.105073 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hct62\" (UniqueName: \"kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.105349 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.605337525 +0000 UTC m=+159.489762625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.193852 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.194756 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.200082 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.205605 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.205763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.205814 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.705783359 +0000 UTC m=+159.590208459 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.205919 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hct62\" (UniqueName: \"kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.205973 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.206128 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.206652 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.209482 4779 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.252007 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hct62\" (UniqueName: \"kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62\") pod \"certified-operators-rxml4\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.306936 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.306987 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.307020 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nd9z\" (UniqueName: \"kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.307052 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.307382 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.807364634 +0000 UTC m=+159.691789734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.368795 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.408663 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.408807 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.908782575 +0000 UTC m=+159.793207675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.408841 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.408881 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.408913 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nd9z\" (UniqueName: \"kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.408932 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.409341 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:48.909291719 +0000 UTC m=+159.793716859 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.410181 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.410235 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.425426 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-22bm4"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.426303 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-22bm4" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.442978 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22bm4"] Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.452369 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nd9z\" (UniqueName: \"kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z\") pod \"community-operators-4b959\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.507111 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.510458 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.510564 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.010545375 +0000 UTC m=+159.894970465 (durationBeforeRetry 500ms). 
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.510868 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.511266 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.011250774 +0000 UTC m=+159.895675874 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.597912 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" event={"ID":"dc132003-e19f-4bdc-b77f-69dbe408b68f","Type":"ContainerStarted","Data":"5d7fe5b4e11f50ab1ed93d868957d0d2f10cd306e4f3e47c01e2c62325dd75c4"}
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.597949 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" event={"ID":"dc132003-e19f-4bdc-b77f-69dbe408b68f","Type":"ContainerStarted","Data":"1000d5b1bfaff85631e8fbe1d329d1ae349fe2bc830fdae9663d7e3dbdb4d683"}
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.613447 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.613661 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw9kg\" (UniqueName: \"kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.613690 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.613706 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.613850 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.113834627 +0000 UTC m=+159.998259727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.615238 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xshk2"]
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.626210 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lzrjb"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.626296 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xshk2"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.645490 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xshk2"]
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.712627 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-b57sr" podStartSLOduration=9.712610956 podStartE2EDuration="9.712610956s" podCreationTimestamp="2025-09-29 19:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:48.673448524 +0000 UTC m=+159.557873634" watchObservedRunningTime="2025-09-29 19:10:48.712610956 +0000 UTC m=+159.597036056"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.714818 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 19:10:48 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld
Sep 29 19:10:48 crc kubenswrapper[4779]: [+]process-running ok
Sep 29 19:10:48 crc kubenswrapper[4779]: healthz check failed
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.714865 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.715337 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw9kg\" (UniqueName: \"kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.715380 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.715395 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.715457 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.717226 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.217214321 +0000 UTC m=+160.101639411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.718605 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.718812 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.759180 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw9kg\" (UniqueName: \"kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg\") pod \"certified-operators-22bm4\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") " pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.816974 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.817136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcfz6\" (UniqueName: \"kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.817190 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2"
Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.817239 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2"
Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.817350 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.317333886 +0000 UTC m=+160.201758986 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.317333886 +0000 UTC m=+160.201758986 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.920764 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcfz6\" (UniqueName: \"kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.920998 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.921031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.921079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.921489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: E0929 19:10:48.921647 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.421632255 +0000 UTC m=+160.306057345 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.921702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.960531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcfz6\" (UniqueName: \"kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6\") pod \"community-operators-xshk2\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:48 crc kubenswrapper[4779]: I0929 19:10:48.970899 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.007654 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.023114 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.023501 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.523485488 +0000 UTC m=+160.407910588 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.037395 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.039491 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:10:49 crc kubenswrapper[4779]: W0929 19:10:49.057654 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cf06f8e_1088_4c70_809f_c4b1a55e96e5.slice/crio-0fe3ef18a5cdd947651af07fc4c43f15097b4472927aa691930d4b852f0df960 WatchSource:0}: Error finding container 0fe3ef18a5cdd947651af07fc4c43f15097b4472927aa691930d4b852f0df960: Status 404 returned error can't find the container with id 0fe3ef18a5cdd947651af07fc4c43f15097b4472927aa691930d4b852f0df960
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.072538 4779 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.124251 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.124833 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.624805656 +0000 UTC m=+160.509230756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.228600 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.229134 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.729119096 +0000 UTC m=+160.613544196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.296365 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xshk2"]
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.330006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.330414 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.830396793 +0000 UTC m=+160.714821893 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.376706 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.377539 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.379367 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.379552 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.379950 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22bm4"]
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.388010 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.431436 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.431602 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.931543306 +0000 UTC m=+160.815968416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.431659 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.431928 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:49.931917086 +0000 UTC m=+160.816342186 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.533592 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.533874 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.533904 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.033863151 +0000 UTC m=+160.918288291 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.533988 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.534098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.534507 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.034492018 +0000 UTC m=+160.918917118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.602465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerStarted","Data":"0fe3ef18a5cdd947651af07fc4c43f15097b4472927aa691930d4b852f0df960"}
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.605808 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerStarted","Data":"f5e51fcb59d3966d3ceb7b3d5d556c5e49515af393b2460673adfb4fa7c9350c"}
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.606743 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerStarted","Data":"8a67cc0f4136797177f0160d747b83e388aa1576bf545e18b846dd5fe0607613"}
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.614266 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerStarted","Data":"ea8e919eee625b008dc50d16ef0cf486bb78bbcc68e6a65733ec0db089e4a40d"}
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.635615 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.635766 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.135744965 +0000 UTC m=+161.020170065 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.635817 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.635893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.635930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.635968 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.636293 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.136280219 +0000 UTC m=+161.020705319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.657058 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.695928 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.708352 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 19:10:49 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld
Sep 29 19:10:49 crc kubenswrapper[4779]: [+]process-running ok
Sep 29 19:10:49 crc kubenswrapper[4779]: healthz check failed
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.708432 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.736820 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.736985 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.23695374 +0000 UTC m=+161.121378840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.737117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.737931 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.237918126 +0000 UTC m=+161.122343226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.840901 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.841060 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.341038103 +0000 UTC m=+161.225463203 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.841281 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.841649 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.341631509 +0000 UTC m=+161.226056599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tgwxw" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.904484 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.943094 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:49 crc kubenswrapper[4779]: E0929 19:10:49.944086 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 19:10:50.444036827 +0000 UTC m=+161.328461947 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 19:10:49 crc kubenswrapper[4779]: W0929 19:10:49.951602 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf188a3c4_156e_4269_87c4_48826a003674.slice/crio-6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d WatchSource:0}: Error finding container 6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d: Status 404 returned error can't find the container with id 6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.982514 4779 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T19:10:49.072562749Z","Handler":null,"Name":""}
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.986450 4779 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 29 19:10:49 crc kubenswrapper[4779]: I0929 19:10:49.986490 4779 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.044418 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.046983 4779 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.047010 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.089895 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tgwxw\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.145729 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.154398 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.188180 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.189585 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.193446 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.200051 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.227146 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.347899 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.348237 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.348260 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4jhx\" (UniqueName: \"kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.373864 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.449610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.449670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.449691 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4jhx\" (UniqueName: \"kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.450416 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.450438 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.472495 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4jhx\" (UniqueName: \"kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx\") pod \"redhat-marketplace-8ncmv\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.505942 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ncmv"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.587997 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.589183 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkktm"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.604901 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.619288 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.647845 4779 generic.go:334] "Generic (PLEG): container finished" podID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerID="1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f" exitCode=0
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.647934 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerDied","Data":"1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.649686 4779 generic.go:334] "Generic (PLEG): container finished" podID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerID="837d718292acb63772a0f7caee199e9ba5243f6e7c5cb6032e354690ae060368" exitCode=0
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.649733 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerDied","Data":"837d718292acb63772a0f7caee199e9ba5243f6e7c5cb6032e354690ae060368"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.652151 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.652175 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerID="e8d205b24efa49a093f990a3e22bb5f7a278c80b4b7f3bc4b14ebd48cebd0ba6" exitCode=0
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.652333 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerDied","Data":"e8d205b24efa49a093f990a3e22bb5f7a278c80b4b7f3bc4b14ebd48cebd0ba6"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.655976 4779 generic.go:334] "Generic (PLEG): container finished" podID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerID="ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73" exitCode=0
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.656045 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerDied","Data":"ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.658106 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f188a3c4-156e-4269-87c4-48826a003674","Type":"ContainerStarted","Data":"fef0a6b3752dd9ae21370c772640343cd24f81d9232679c0ed208162f591367e"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.658151 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f188a3c4-156e-4269-87c4-48826a003674","Type":"ContainerStarted","Data":"6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d"}
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.718386 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 19:10:50 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld
Sep 29 19:10:50 crc kubenswrapper[4779]: [+]process-running ok
Sep 29 19:10:50 crc kubenswrapper[4779]: healthz check failed
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.718631 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.738156 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.738847 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.738992 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.740895 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=1.740875179 podStartE2EDuration="1.740875179s" podCreationTimestamp="2025-09-29 19:10:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:50.730665162 +0000 UTC m=+161.615090252" watchObservedRunningTime="2025-09-29 19:10:50.740875179 +0000 UTC m=+161.625300279"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.741739 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.743660 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.753249 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.753404 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6cx5\" (UniqueName: \"kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm"
Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.753423 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm"
pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.759007 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"] Sep 29 19:10:50 crc kubenswrapper[4779]: W0929 19:10:50.763793 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37942c99_d186_454f_9d72_04ec19e7c737.slice/crio-c8b5f599216a8cc889424d5e3a93b55d280a372b70df1b6b3b47f4b51e8a44e5 WatchSource:0}: Error finding container c8b5f599216a8cc889424d5e3a93b55d280a372b70df1b6b3b47f4b51e8a44e5: Status 404 returned error can't find the container with id c8b5f599216a8cc889424d5e3a93b55d280a372b70df1b6b3b47f4b51e8a44e5 Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.854240 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.854469 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.854571 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6cx5\" (UniqueName: \"kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.854714 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.854990 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.855665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.855708 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 
19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.876994 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6cx5\" (UniqueName: \"kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5\") pod \"redhat-marketplace-mkktm\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.907068 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.956818 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.956905 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.956957 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:50 crc kubenswrapper[4779]: I0929 19:10:50.978701 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.062522 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.104445 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"] Sep 29 19:10:51 crc kubenswrapper[4779]: W0929 19:10:51.109726 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9209519d_4c0e_4735_aa5d_731cdb7543a9.slice/crio-938dbf80cb5b4d813b54a1abbd826887d81eed6945989b3a9e9e7da0ef011865 WatchSource:0}: Error finding container 938dbf80cb5b4d813b54a1abbd826887d81eed6945989b3a9e9e7da0ef011865: Status 404 returned error can't find the container with id 938dbf80cb5b4d813b54a1abbd826887d81eed6945989b3a9e9e7da0ef011865 Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.189711 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"] Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.190741 4779 util.go:30] "No sandbox for pod can be found. 
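
The reconciler_common/operation_generator entries above trace the kubelet volume manager's pattern for each volume a pending pod needs: VerifyControllerAttachedVolume, then "MountVolume started", then "MountVolume.SetUp succeeded". A schematic version of that desired-versus-actual reconcile loop follows, with illustrative types rather than kubelet's real operation executor.

    package main

    import "fmt"

    // A stripped-down picture of the reconcile pattern visible above: desired
    // state lists what pod specs need, actual state lists what is mounted, and
    // each pass mounts what is missing and unmounts what is orphaned. The types
    // and names are illustrative only.
    type volumeKey struct{ podUID, volume string }

    func reconcile(desired, actual map[volumeKey]bool) {
        for k := range desired {
            if !actual[k] {
                fmt.Printf("MountVolume started for volume %q pod %q\n", k.volume, k.podUID)
                // ... verify attach, then SetUp the mount point ...
                actual[k] = true
                fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", k.volume, k.podUID)
            }
        }
        for k := range actual {
            if !desired[k] {
                fmt.Printf("UnmountVolume started for volume %q pod %q\n", k.volume, k.podUID)
                delete(actual, k)
            }
        }
    }

    func main() {
        desired := map[volumeKey]bool{
            {"9209519d-4c0e-4735-aa5d-731cdb7543a9", "utilities"}:       true,
            {"9209519d-4c0e-4735-aa5d-731cdb7543a9", "catalog-content"}: true,
        }
        reconcile(desired, map[volumeKey]bool{})
    }
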
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.190741 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.192724 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.197502 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"]
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.361994 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.362049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.362089 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q565t\" (UniqueName: \"kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.463049 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.463094 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.463125 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q565t\" (UniqueName: \"kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.464018 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.464363 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.495976 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q565t\" (UniqueName: \"kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t\") pod \"redhat-operators-4nts8\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.499397 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 19:10:51 crc kubenswrapper[4779]: W0929 19:10:51.521651 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod9413a4c1_e317_4da6_af34_34fd6beaddd0.slice/crio-cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883 WatchSource:0}: Error finding container cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883: Status 404 returned error can't find the container with id cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.552847 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.592798 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"]
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.596443 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7k5wr"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.603071 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"]
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.645915 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-nxttj"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.652002 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-nxttj"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.684171 4779 generic.go:334] "Generic (PLEG): container finished" podID="2d999e79-a467-4f19-a67a-f5993c6b4423" containerID="cd53d2289ffddfa4e5198db41eeb503c449f0e6870cff978d2687dd64c7d6c42" exitCode=0
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.684237 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" event={"ID":"2d999e79-a467-4f19-a67a-f5993c6b4423","Type":"ContainerDied","Data":"cd53d2289ffddfa4e5198db41eeb503c449f0e6870cff978d2687dd64c7d6c42"}
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.694433 4779 generic.go:334] "Generic (PLEG): container finished" podID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerID="d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4" exitCode=0
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.694577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerDied","Data":"d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4"}
pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerStarted","Data":"938dbf80cb5b4d813b54a1abbd826887d81eed6945989b3a9e9e7da0ef011865"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.710218 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.723551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9413a4c1-e317-4da6-af34-34fd6beaddd0","Type":"ContainerStarted","Data":"cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.729580 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:51 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:51 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:51 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.729664 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.737104 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.753234 4779 generic.go:334] "Generic (PLEG): container finished" podID="37942c99-d186-454f-9d72-04ec19e7c737" containerID="3b0dfa255b289d6fbe2a4ae6a40261feb39a7c74bc8cddc737c3f637c81080f7" exitCode=0 Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.753357 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerDied","Data":"3b0dfa255b289d6fbe2a4ae6a40261feb39a7c74bc8cddc737c3f637c81080f7"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.753388 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerStarted","Data":"c8b5f599216a8cc889424d5e3a93b55d280a372b70df1b6b3b47f4b51e8a44e5"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.764154 4779 generic.go:334] "Generic (PLEG): container finished" podID="f188a3c4-156e-4269-87c4-48826a003674" containerID="fef0a6b3752dd9ae21370c772640343cd24f81d9232679c0ed208162f591367e" exitCode=0 Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.764275 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f188a3c4-156e-4269-87c4-48826a003674","Type":"ContainerDied","Data":"fef0a6b3752dd9ae21370c772640343cd24f81d9232679c0ed208162f591367e"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.781251 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities\") pod \"redhat-operators-7k5wr\" (UID: 
\"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.781355 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgph4\" (UniqueName: \"kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.781380 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.810696 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.811540 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" event={"ID":"08924ea4-79d3-439f-8bdb-150f807221d9","Type":"ContainerStarted","Data":"95f76bd6186ac2fed5ac2348b447a6c5d7e2d25804c23aef88a60cce7e66d933"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.811576 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" event={"ID":"08924ea4-79d3-439f-8bdb-150f807221d9","Type":"ContainerStarted","Data":"53168aa818ef5ef7020735fe334d54694ddb2a69e03946cd787b8d01873ad074"} Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.883237 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.883355 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgph4\" (UniqueName: \"kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.883409 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.886580 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.887759 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.888662 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.888744 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.893754 4779 patch_prober.go:28] interesting pod/console-f9d7485db-hhcst container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.893828 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hhcst" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.909930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgph4\" (UniqueName: \"kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4\") pod \"redhat-operators-7k5wr\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.923831 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.923831 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7k5wr"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.966699 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.966699 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.966745 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused"
Sep 29 19:10:51 crc kubenswrapper[4779]: I0929 19:10:51.966755 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.069032 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" podStartSLOduration=143.069013313 podStartE2EDuration="2m23.069013313s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:51.866710675 +0000 UTC m=+162.751135775" watchObservedRunningTime="2025-09-29 19:10:52.069013313 +0000 UTC m=+162.953438413"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.072443 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"]
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.123576 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h"
Sep 29 19:10:52 crc kubenswrapper[4779]: W0929 19:10:52.125137 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb7eba8f_ce89_4f64_a05a_d73d0c3b630b.slice/crio-4a46f3ee3c87c70006d0f1b37925c5413e7bc5c140d11c54612d53cb92b86a56 WatchSource:0}: Error finding container 4a46f3ee3c87c70006d0f1b37925c5413e7bc5c140d11c54612d53cb92b86a56: Status 404 returned error can't find the container with id 4a46f3ee3c87c70006d0f1b37925c5413e7bc5c140d11c54612d53cb92b86a56
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.282017 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"]
Sep 29 19:10:52 crc kubenswrapper[4779]: W0929 19:10:52.303002 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf89bc289_c3b4_4ca5_843b_b9b6ec104ede.slice/crio-fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e WatchSource:0}: Error finding container fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e: Status 404 returned error can't find the container with id fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.698335 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.708541 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 19:10:52 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld
Sep 29 19:10:52 crc kubenswrapper[4779]: [+]process-running ok
Sep 29 19:10:52 crc kubenswrapper[4779]: healthz check failed
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.708600 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.723181 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4df079c4-34e3-4132-91bb-ad68488552f8-metrics-certs\") pod \"network-metrics-daemon-2rtwf\" (UID: \"4df079c4-34e3-4132-91bb-ad68488552f8\") " pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.783520 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2rtwf"
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.887209 4779 generic.go:334] "Generic (PLEG): container finished" podID="9413a4c1-e317-4da6-af34-34fd6beaddd0" containerID="22f5cf8fcf14619ed5085f0101610af9a46c9a6c4d20ca2bcc4bb086dd796320" exitCode=0
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.887274 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9413a4c1-e317-4da6-af34-34fd6beaddd0","Type":"ContainerDied","Data":"22f5cf8fcf14619ed5085f0101610af9a46c9a6c4d20ca2bcc4bb086dd796320"}
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.895333 4779 generic.go:334] "Generic (PLEG): container finished" podID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerID="e6d3d37723ad7c470644352d54d285f9bf42946e93f0a87358a40a38ebfae26a" exitCode=0
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.895405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerDied","Data":"e6d3d37723ad7c470644352d54d285f9bf42946e93f0a87358a40a38ebfae26a"}
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.895439 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerStarted","Data":"fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e"}
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.936346 4779 generic.go:334] "Generic (PLEG): container finished" podID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerID="6eded466b41152f2d69db3e6433962d9fbe46352f7623528f6921d9e3e72196d" exitCode=0
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.936639 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerDied","Data":"6eded466b41152f2d69db3e6433962d9fbe46352f7623528f6921d9e3e72196d"}
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.936667 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerStarted","Data":"4a46f3ee3c87c70006d0f1b37925c5413e7bc5c140d11c54612d53cb92b86a56"}
Sep 29 19:10:52 crc kubenswrapper[4779]: I0929 19:10:52.937795 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.266789 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.367179 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.412738 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access\") pod \"f188a3c4-156e-4269-87c4-48826a003674\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") "
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.412875 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir\") pod \"f188a3c4-156e-4269-87c4-48826a003674\" (UID: \"f188a3c4-156e-4269-87c4-48826a003674\") "
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.412920 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f188a3c4-156e-4269-87c4-48826a003674" (UID: "f188a3c4-156e-4269-87c4-48826a003674"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.413151 4779 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f188a3c4-156e-4269-87c4-48826a003674-kubelet-dir\") on node \"crc\" DevicePath \"\""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.432099 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f188a3c4-156e-4269-87c4-48826a003674" (UID: "f188a3c4-156e-4269-87c4-48826a003674"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.464875 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2rtwf"]
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.513603 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume\") pod \"2d999e79-a467-4f19-a67a-f5993c6b4423\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") "
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.513693 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dpgg\" (UniqueName: \"kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg\") pod \"2d999e79-a467-4f19-a67a-f5993c6b4423\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") "
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.513778 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume\") pod \"2d999e79-a467-4f19-a67a-f5993c6b4423\" (UID: \"2d999e79-a467-4f19-a67a-f5993c6b4423\") "
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.514212 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f188a3c4-156e-4269-87c4-48826a003674-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.515561 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume" (OuterVolumeSpecName: "config-volume") pod "2d999e79-a467-4f19-a67a-f5993c6b4423" (UID: "2d999e79-a467-4f19-a67a-f5993c6b4423"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.541050 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg" (OuterVolumeSpecName: "kube-api-access-6dpgg") pod "2d999e79-a467-4f19-a67a-f5993c6b4423" (UID: "2d999e79-a467-4f19-a67a-f5993c6b4423"). InnerVolumeSpecName "kube-api-access-6dpgg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.546765 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2d999e79-a467-4f19-a67a-f5993c6b4423" (UID: "2d999e79-a467-4f19-a67a-f5993c6b4423"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.615789 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d999e79-a467-4f19-a67a-f5993c6b4423-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.616090 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d999e79-a467-4f19-a67a-f5993c6b4423-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.616101 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dpgg\" (UniqueName: \"kubernetes.io/projected/2d999e79-a467-4f19-a67a-f5993c6b4423-kube-api-access-6dpgg\") on node \"crc\" DevicePath \"\""
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.709585 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 19:10:53 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld
Sep 29 19:10:53 crc kubenswrapper[4779]: [+]process-running ok
Sep 29 19:10:53 crc kubenswrapper[4779]: healthz check failed
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.709639 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.949223 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.949249 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f188a3c4-156e-4269-87c4-48826a003674","Type":"ContainerDied","Data":"6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d"}
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.949278 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ad77353832183effd5df5e46ebdd99c3e73a05df99f8b3b9d3d564e7af8639d"
Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.954888 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" event={"ID":"4df079c4-34e3-4132-91bb-ad68488552f8","Type":"ContainerStarted","Data":"ddb720454ae0fd949b84af8dbc0e12499609665bedcaae5555cdaad951606c15"}
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.959834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh" event={"ID":"2d999e79-a467-4f19-a67a-f5993c6b4423","Type":"ContainerDied","Data":"3249fc9a4245d55f2c8e73ac8765ebd3648841699829bd995a27626f1856ff9c"} Sep 29 19:10:53 crc kubenswrapper[4779]: I0929 19:10:53.959857 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3249fc9a4245d55f2c8e73ac8765ebd3648841699829bd995a27626f1856ff9c" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.270631 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.326973 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir\") pod \"9413a4c1-e317-4da6-af34-34fd6beaddd0\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.327111 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9413a4c1-e317-4da6-af34-34fd6beaddd0" (UID: "9413a4c1-e317-4da6-af34-34fd6beaddd0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.327330 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access\") pod \"9413a4c1-e317-4da6-af34-34fd6beaddd0\" (UID: \"9413a4c1-e317-4da6-af34-34fd6beaddd0\") " Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.328065 4779 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9413a4c1-e317-4da6-af34-34fd6beaddd0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.333545 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9413a4c1-e317-4da6-af34-34fd6beaddd0" (UID: "9413a4c1-e317-4da6-af34-34fd6beaddd0"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.381326 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-q75j6" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.429349 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9413a4c1-e317-4da6-af34-34fd6beaddd0-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.708190 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:54 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:54 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:54 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.708268 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.983161 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" event={"ID":"4df079c4-34e3-4132-91bb-ad68488552f8","Type":"ContainerStarted","Data":"40a391842a319959b418f95bb5820046852a1faa48116cc2d08cdaeea6a8614b"} Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.986112 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9413a4c1-e317-4da6-af34-34fd6beaddd0","Type":"ContainerDied","Data":"cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883"} Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.986185 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfecfd1b42234e8e4ac648c8c056288c614234762e5b81a3b6fde6a0e9408883" Sep 29 19:10:54 crc kubenswrapper[4779]: I0929 19:10:54.986137 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 19:10:55 crc kubenswrapper[4779]: I0929 19:10:55.709866 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:55 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:55 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:55 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:55 crc kubenswrapper[4779]: I0929 19:10:55.710194 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:56 crc kubenswrapper[4779]: I0929 19:10:56.002806 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2rtwf" event={"ID":"4df079c4-34e3-4132-91bb-ad68488552f8","Type":"ContainerStarted","Data":"e7021294a357a57d60fb547335ff353f293b5f064c6f995cf108e2a9dfbae8cd"} Sep 29 19:10:56 crc kubenswrapper[4779]: I0929 19:10:56.022733 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-2rtwf" podStartSLOduration=147.02270233 podStartE2EDuration="2m27.02270233s" podCreationTimestamp="2025-09-29 19:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:10:56.018446444 +0000 UTC m=+166.902871564" watchObservedRunningTime="2025-09-29 19:10:56.02270233 +0000 UTC m=+166.907127430" Sep 29 19:10:56 crc kubenswrapper[4779]: I0929 19:10:56.707737 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:56 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:56 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:56 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:56 crc kubenswrapper[4779]: I0929 19:10:56.707791 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:57 crc kubenswrapper[4779]: I0929 19:10:57.708810 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:57 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:57 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:57 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:57 crc kubenswrapper[4779]: I0929 19:10:57.709072 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:58 crc kubenswrapper[4779]: I0929 19:10:58.707922 4779 
patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:58 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:58 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:58 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:58 crc kubenswrapper[4779]: I0929 19:10:58.707997 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:10:59 crc kubenswrapper[4779]: I0929 19:10:59.708508 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:10:59 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:10:59 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:10:59 crc kubenswrapper[4779]: healthz check failed Sep 29 19:10:59 crc kubenswrapper[4779]: I0929 19:10:59.708628 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:11:00 crc kubenswrapper[4779]: I0929 19:11:00.708775 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:11:00 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:11:00 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:11:00 crc kubenswrapper[4779]: healthz check failed Sep 29 19:11:00 crc kubenswrapper[4779]: I0929 19:11:00.708854 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.708802 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:11:01 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:11:01 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:11:01 crc kubenswrapper[4779]: healthz check failed Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.709226 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.888929 4779 patch_prober.go:28] interesting pod/console-f9d7485db-hhcst container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection 
refused" start-of-body= Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.889051 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hhcst" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.965898 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.965950 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-4vf95 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.966000 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Sep 29 19:11:01 crc kubenswrapper[4779]: I0929 19:11:01.965953 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4vf95" podUID="078d7a8c-aa40-42c3-b26e-30dd2a01cae0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Sep 29 19:11:02 crc kubenswrapper[4779]: I0929 19:11:02.708892 4779 patch_prober.go:28] interesting pod/router-default-5444994796-4bzgw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 19:11:02 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 19:11:02 crc kubenswrapper[4779]: [+]process-running ok Sep 29 19:11:02 crc kubenswrapper[4779]: healthz check failed Sep 29 19:11:02 crc kubenswrapper[4779]: I0929 19:11:02.708950 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-4bzgw" podUID="b2767e5e-6a38-4668-95f6-f677e298c6f8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 19:11:03 crc kubenswrapper[4779]: I0929 19:11:03.708768 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:11:03 crc kubenswrapper[4779]: I0929 19:11:03.714587 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-4bzgw" Sep 29 19:11:10 crc kubenswrapper[4779]: I0929 19:11:10.383104 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:11:11 crc kubenswrapper[4779]: I0929 19:11:11.911225 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:11:11 crc kubenswrapper[4779]: I0929 19:11:11.917455 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:11:11 crc kubenswrapper[4779]: I0929 19:11:11.973375 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4vf95" Sep 29 19:11:13 crc kubenswrapper[4779]: I0929 19:11:13.785400 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:11:13 crc kubenswrapper[4779]: I0929 19:11:13.785925 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:11:19 crc kubenswrapper[4779]: I0929 19:11:19.196307 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 19:11:19 crc kubenswrapper[4779]: E0929 19:11:19.361769 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 19:11:19 crc kubenswrapper[4779]: E0929 19:11:19.362007 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5nd9z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4b959_openshift-marketplace(6cf06f8e-1088-4c70-809f-c4b1a55e96e5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 19:11:19 crc kubenswrapper[4779]: E0929 19:11:19.363292 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4b959" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" Sep 29 19:11:21 crc kubenswrapper[4779]: I0929 19:11:21.972222 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h4zfh" Sep 29 19:11:23 crc kubenswrapper[4779]: E0929 19:11:23.378832 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4b959" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" Sep 29 19:11:23 crc kubenswrapper[4779]: E0929 19:11:23.648240 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 19:11:23 crc kubenswrapper[4779]: E0929 19:11:23.648437 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wcfz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-xshk2_openshift-marketplace(a0becbd8-a5ce-42a3-9afe-137668fb98fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 19:11:23 crc kubenswrapper[4779]: E0929 19:11:23.649660 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-xshk2" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" Sep 29 19:11:27 crc kubenswrapper[4779]: E0929 
19:11:27.377658 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 19:11:27 crc kubenswrapper[4779]: E0929 19:11:27.377932 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dw9kg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-22bm4_openshift-marketplace(68227c08-072e-4071-9206-2f34b9e9f1cf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 19:11:27 crc kubenswrapper[4779]: E0929 19:11:27.379240 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-22bm4" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" Sep 29 19:11:29 crc kubenswrapper[4779]: E0929 19:11:29.518812 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-xshk2" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" Sep 29 19:11:29 crc kubenswrapper[4779]: E0929 19:11:29.518958 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-22bm4" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" Sep 29 19:11:30 crc kubenswrapper[4779]: E0929 19:11:30.046841 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from 
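
Each catalog pod above cycles between ErrImagePull (the pull attempt itself failed, here with "context canceled") and ImagePullBackOff (the kubelet refusing to retry immediately). The sketch below models that retry delay. The 10-second initial delay and 5-minute cap are the commonly documented kubelet defaults and are an assumption here, not values read from this log.

package main

import (
	"fmt"
	"time"
)

// nextBackoff sketches the ErrImagePull -> ImagePullBackOff retry delay:
// start small and roughly double on each failure, up to a cap.
func nextBackoff(cur time.Duration) time.Duration {
	const initial = 10 * time.Second // assumed default
	const cap = 5 * time.Minute      // assumed default
	if cur == 0 {
		return initial
	}
	if next := cur * 2; next < cap {
		return next
	}
	return cap
}

func main() {
	var delay time.Duration
	for i := 0; i < 7; i++ {
		delay = nextBackoff(delay)
		fmt.Printf("retry %d after %v\n", i+1, delay)
	}
}
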
Sep 29 19:11:30 crc kubenswrapper[4779]: E0929 19:11:30.047202 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hct62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-rxml4_openshift-marketplace(79af1e0e-dad6-413a-80ea-699ef7f5cdad): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 19:11:30 crc kubenswrapper[4779]: E0929 19:11:30.048531 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-rxml4" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad"
Sep 29 19:11:31 crc kubenswrapper[4779]: E0929 19:11:31.441171 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 29 19:11:31 crc kubenswrapper[4779]: E0929 19:11:31.441335 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d4jhx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-8ncmv_openshift-marketplace(37942c99-d186-454f-9d72-04ec19e7c737): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 19:11:31 crc kubenswrapper[4779]: E0929 19:11:31.442727 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-8ncmv" podUID="37942c99-d186-454f-9d72-04ec19e7c737"
Sep 29 19:11:32 crc kubenswrapper[4779]: E0929 19:11:32.816397 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 29 19:11:32 crc kubenswrapper[4779]: E0929 19:11:32.816804 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t6cx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mkktm_openshift-marketplace(9209519d-4c0e-4735-aa5d-731cdb7543a9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 19:11:32 crc kubenswrapper[4779]: E0929 19:11:32.818016 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mkktm" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9"
Sep 29 19:11:33 crc kubenswrapper[4779]: E0929 19:11:33.884068 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-rxml4" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad"
Sep 29 19:11:33 crc kubenswrapper[4779]: E0929 19:11:33.884025 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mkktm" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9"
Sep 29 19:11:33 crc kubenswrapper[4779]: E0929 19:11:33.884240 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-8ncmv" podUID="37942c99-d186-454f-9d72-04ec19e7c737"
Sep 29 19:11:34 crc kubenswrapper[4779]: E0929 19:11:34.142202 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 29 19:11:34 crc kubenswrapper[4779]: E0929 19:11:34.142385 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cgph4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-7k5wr_openshift-marketplace(f89bc289-c3b4-4ca5-843b-b9b6ec104ede): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 19:11:34 crc kubenswrapper[4779]: E0929 19:11:34.143585 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-7k5wr" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede"
Sep 29 19:11:34 crc kubenswrapper[4779]: E0929 19:11:34.281861 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-7k5wr" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede"
Sep 29 19:11:36 crc kubenswrapper[4779]: E0929 19:11:36.872422 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 29 19:11:36 crc kubenswrapper[4779]: E0929 19:11:36.872794 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q565t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4nts8_openshift-marketplace(cb7eba8f-ce89-4f64-a05a-d73d0c3b630b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 19:11:36 crc kubenswrapper[4779]: E0929 19:11:36.874028 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4nts8" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b"
Sep 29 19:11:37 crc kubenswrapper[4779]: E0929 19:11:37.298694 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4nts8" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b"
Sep 29 19:11:39 crc kubenswrapper[4779]: I0929 19:11:39.310440 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerID="856368aee0f6c0d45213fe87848ce2c0ac53cca9d92d1ac0cb5ed50efab57483" exitCode=0
Sep 29 19:11:39 crc kubenswrapper[4779]: I0929 19:11:39.310569 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerDied","Data":"856368aee0f6c0d45213fe87848ce2c0ac53cca9d92d1ac0cb5ed50efab57483"}
Sep 29 19:11:40 crc kubenswrapper[4779]: I0929 19:11:40.316761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerStarted","Data":"7dc6b099204da081e3fd6cc015737a917db7d93444374f97282895ab0ef89e8d"}
Sep 29 19:11:40 crc kubenswrapper[4779]: I0929 19:11:40.337545 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4b959" podStartSLOduration=3.226363339 podStartE2EDuration="52.337516414s" podCreationTimestamp="2025-09-29 19:10:48 +0000 UTC" firstStartedPulling="2025-09-29 19:10:50.658289319 +0000 UTC m=+161.542714409" lastFinishedPulling="2025-09-29 19:11:39.769442384 +0000 UTC m=+210.653867484" observedRunningTime="2025-09-29 19:11:40.335411346 +0000 UTC m=+211.219836446" watchObservedRunningTime="2025-09-29 19:11:40.337516414 +0000 UTC m=+211.221941514"
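
The "Observed pod startup duration" entry above decodes as follows: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (19:11:40.337516414 - 19:10:48 = 52.337516414s), and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling - firstStartedPulling ≈ 49.111s), leaving ≈ 3.2264s, matching the logged 3.226363339 up to rounding. That is consistent with the upstream pod-startup SLI, which excludes image pulling. The arithmetic, checked in Go:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the community-operators-4b959 entry above.
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-29 19:10:48 +0000 UTC")
	firstPull := parse("2025-09-29 19:10:50.658289319 +0000 UTC")
	lastPull := parse("2025-09-29 19:11:39.769442384 +0000 UTC")
	running := parse("2025-09-29 19:11:40.337516414 +0000 UTC")

	e2e := running.Sub(created)          // 52.337516414s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // ≈ 3.2264s ≈ podStartSLOduration
	fmt.Println(e2e, slo)
}
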
+0000 UTC" firstStartedPulling="2025-09-29 19:10:50.658289319 +0000 UTC m=+161.542714409" lastFinishedPulling="2025-09-29 19:11:39.769442384 +0000 UTC m=+210.653867484" observedRunningTime="2025-09-29 19:11:40.335411346 +0000 UTC m=+211.219836446" watchObservedRunningTime="2025-09-29 19:11:40.337516414 +0000 UTC m=+211.221941514" Sep 29 19:11:43 crc kubenswrapper[4779]: I0929 19:11:43.786121 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:11:43 crc kubenswrapper[4779]: I0929 19:11:43.786799 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:11:43 crc kubenswrapper[4779]: I0929 19:11:43.786864 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:11:43 crc kubenswrapper[4779]: I0929 19:11:43.789046 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:11:43 crc kubenswrapper[4779]: I0929 19:11:43.789383 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c" gracePeriod=600 Sep 29 19:11:44 crc kubenswrapper[4779]: I0929 19:11:44.342438 4779 generic.go:334] "Generic (PLEG): container finished" podID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerID="42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f" exitCode=0 Sep 29 19:11:44 crc kubenswrapper[4779]: I0929 19:11:44.342485 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerDied","Data":"42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f"} Sep 29 19:11:44 crc kubenswrapper[4779]: I0929 19:11:44.345628 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c" exitCode=0 Sep 29 19:11:44 crc kubenswrapper[4779]: I0929 19:11:44.345649 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c"} Sep 29 19:11:44 crc kubenswrapper[4779]: I0929 19:11:44.345670 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" 
event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72"} Sep 29 19:11:45 crc kubenswrapper[4779]: I0929 19:11:45.354029 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerStarted","Data":"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"} Sep 29 19:11:45 crc kubenswrapper[4779]: I0929 19:11:45.379257 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-22bm4" podStartSLOduration=3.007232224 podStartE2EDuration="57.37923361s" podCreationTimestamp="2025-09-29 19:10:48 +0000 UTC" firstStartedPulling="2025-09-29 19:10:50.658611707 +0000 UTC m=+161.543036807" lastFinishedPulling="2025-09-29 19:11:45.030613083 +0000 UTC m=+215.915038193" observedRunningTime="2025-09-29 19:11:45.374515131 +0000 UTC m=+216.258940231" watchObservedRunningTime="2025-09-29 19:11:45.37923361 +0000 UTC m=+216.263658710" Sep 29 19:11:46 crc kubenswrapper[4779]: I0929 19:11:46.361835 4779 generic.go:334] "Generic (PLEG): container finished" podID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerID="22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9" exitCode=0 Sep 29 19:11:46 crc kubenswrapper[4779]: I0929 19:11:46.361875 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerDied","Data":"22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9"} Sep 29 19:11:47 crc kubenswrapper[4779]: I0929 19:11:47.377983 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerStarted","Data":"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0"} Sep 29 19:11:47 crc kubenswrapper[4779]: I0929 19:11:47.381920 4779 generic.go:334] "Generic (PLEG): container finished" podID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerID="e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c" exitCode=0 Sep 29 19:11:47 crc kubenswrapper[4779]: I0929 19:11:47.381968 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerDied","Data":"e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c"} Sep 29 19:11:47 crc kubenswrapper[4779]: I0929 19:11:47.402531 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xshk2" podStartSLOduration=3.141918407 podStartE2EDuration="59.40251288s" podCreationTimestamp="2025-09-29 19:10:48 +0000 UTC" firstStartedPulling="2025-09-29 19:10:50.651906186 +0000 UTC m=+161.536331286" lastFinishedPulling="2025-09-29 19:11:46.912500649 +0000 UTC m=+217.796925759" observedRunningTime="2025-09-29 19:11:47.399699744 +0000 UTC m=+218.284124884" watchObservedRunningTime="2025-09-29 19:11:47.40251288 +0000 UTC m=+218.286937980" Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.388085 4779 generic.go:334] "Generic (PLEG): container finished" podID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerID="35faa773a5e4556cba6afa8832d1d2f96261f5231bb604c4a467ad860efa2f85" exitCode=0 Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.388127 4779 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerDied","Data":"35faa773a5e4556cba6afa8832d1d2f96261f5231bb604c4a467ad860efa2f85"} Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.392787 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerStarted","Data":"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f"} Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.430128 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mkktm" podStartSLOduration=2.172190297 podStartE2EDuration="58.43011108s" podCreationTimestamp="2025-09-29 19:10:50 +0000 UTC" firstStartedPulling="2025-09-29 19:10:51.71572704 +0000 UTC m=+162.600152140" lastFinishedPulling="2025-09-29 19:11:47.973647813 +0000 UTC m=+218.858072923" observedRunningTime="2025-09-29 19:11:48.428410754 +0000 UTC m=+219.312835874" watchObservedRunningTime="2025-09-29 19:11:48.43011108 +0000 UTC m=+219.314536180" Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.508372 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.508426 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:11:48 crc kubenswrapper[4779]: I0929 19:11:48.686916 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.008541 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.009004 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.040594 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-22bm4" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.040655 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-22bm4" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.078418 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-22bm4" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.402450 4779 generic.go:334] "Generic (PLEG): container finished" podID="37942c99-d186-454f-9d72-04ec19e7c737" containerID="cb08b414bc2257510dcc1ebf7f840b380fee9b30e0f680d8209c87b9e130fdef" exitCode=0 Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.402527 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerDied","Data":"cb08b414bc2257510dcc1ebf7f840b380fee9b30e0f680d8209c87b9e130fdef"} Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.409456 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" 
event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerStarted","Data":"690f2500525c371ab67351244cd385f0a75588b0ea05117ea08058b45a134264"} Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.446948 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7k5wr" podStartSLOduration=2.612671724 podStartE2EDuration="58.446921397s" podCreationTimestamp="2025-09-29 19:10:51 +0000 UTC" firstStartedPulling="2025-09-29 19:10:52.916541631 +0000 UTC m=+163.800966731" lastFinishedPulling="2025-09-29 19:11:48.750791304 +0000 UTC m=+219.635216404" observedRunningTime="2025-09-29 19:11:49.445515628 +0000 UTC m=+220.329940788" watchObservedRunningTime="2025-09-29 19:11:49.446921397 +0000 UTC m=+220.331346527" Sep 29 19:11:49 crc kubenswrapper[4779]: I0929 19:11:49.475437 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.056421 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-xshk2" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="registry-server" probeResult="failure" output=< Sep 29 19:11:50 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 19:11:50 crc kubenswrapper[4779]: > Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.417843 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerStarted","Data":"1123e5bd8b660916fee65d2faee7d6ff0345ddfa9bd3e10d0e8cc9e4048efd77"} Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.421205 4779 generic.go:334] "Generic (PLEG): container finished" podID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerID="12e503eda918eba14a51b7344a1c993a125d3ea878ab62bda95f69888d5561db" exitCode=0 Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.421873 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerDied","Data":"12e503eda918eba14a51b7344a1c993a125d3ea878ab62bda95f69888d5561db"} Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.436352 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8ncmv" podStartSLOduration=2.360959768 podStartE2EDuration="1m0.436309075s" podCreationTimestamp="2025-09-29 19:10:50 +0000 UTC" firstStartedPulling="2025-09-29 19:10:51.7606915 +0000 UTC m=+162.645116600" lastFinishedPulling="2025-09-29 19:11:49.836040797 +0000 UTC m=+220.720465907" observedRunningTime="2025-09-29 19:11:50.434864226 +0000 UTC m=+221.319289326" watchObservedRunningTime="2025-09-29 19:11:50.436309075 +0000 UTC m=+221.320734185" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.507296 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.507450 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.907525 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.907601 4779 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:11:50 crc kubenswrapper[4779]: I0929 19:11:50.963724 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:11:51 crc kubenswrapper[4779]: I0929 19:11:51.436122 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerStarted","Data":"256a02e00f3640e43b9b6cc0c3da0403991fbeee414822b496316ab3fb365a1c"} Sep 29 19:11:51 crc kubenswrapper[4779]: I0929 19:11:51.560156 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-8ncmv" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="registry-server" probeResult="failure" output=< Sep 29 19:11:51 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 19:11:51 crc kubenswrapper[4779]: > Sep 29 19:11:51 crc kubenswrapper[4779]: I0929 19:11:51.925440 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:11:51 crc kubenswrapper[4779]: I0929 19:11:51.925502 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:11:52 crc kubenswrapper[4779]: I0929 19:11:52.443739 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerStarted","Data":"ffe9bf7d456cfc770cba37cf963bbaebf117efb7d01061fbd5f40e061c190c07"} Sep 29 19:11:52 crc kubenswrapper[4779]: I0929 19:11:52.445218 4779 generic.go:334] "Generic (PLEG): container finished" podID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerID="256a02e00f3640e43b9b6cc0c3da0403991fbeee414822b496316ab3fb365a1c" exitCode=0 Sep 29 19:11:52 crc kubenswrapper[4779]: I0929 19:11:52.445241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerDied","Data":"256a02e00f3640e43b9b6cc0c3da0403991fbeee414822b496316ab3fb365a1c"} Sep 29 19:11:52 crc kubenswrapper[4779]: I0929 19:11:52.460658 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rxml4" podStartSLOduration=4.886721223 podStartE2EDuration="1m5.460642914s" podCreationTimestamp="2025-09-29 19:10:47 +0000 UTC" firstStartedPulling="2025-09-29 19:10:50.674269762 +0000 UTC m=+161.558694862" lastFinishedPulling="2025-09-29 19:11:51.248191413 +0000 UTC m=+222.132616553" observedRunningTime="2025-09-29 19:11:52.459269946 +0000 UTC m=+223.343695046" watchObservedRunningTime="2025-09-29 19:11:52.460642914 +0000 UTC m=+223.345068014" Sep 29 19:11:52 crc kubenswrapper[4779]: I0929 19:11:52.966430 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7k5wr" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="registry-server" probeResult="failure" output=< Sep 29 19:11:52 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 19:11:52 crc kubenswrapper[4779]: > Sep 29 19:11:54 crc kubenswrapper[4779]: I0929 19:11:54.457912 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" 
event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerStarted","Data":"0f5d45d2042f252e7caa75a1e1f5e74df23aeb7e6c0d7db3e2c7bc9d4eac333c"} Sep 29 19:11:54 crc kubenswrapper[4779]: I0929 19:11:54.485075 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4nts8" podStartSLOduration=2.911770545 podStartE2EDuration="1m3.485046005s" podCreationTimestamp="2025-09-29 19:10:51 +0000 UTC" firstStartedPulling="2025-09-29 19:10:52.938202148 +0000 UTC m=+163.822627248" lastFinishedPulling="2025-09-29 19:11:53.511477608 +0000 UTC m=+224.395902708" observedRunningTime="2025-09-29 19:11:54.47901406 +0000 UTC m=+225.363439190" watchObservedRunningTime="2025-09-29 19:11:54.485046005 +0000 UTC m=+225.369471145" Sep 29 19:11:58 crc kubenswrapper[4779]: I0929 19:11:58.368891 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:11:58 crc kubenswrapper[4779]: I0929 19:11:58.369305 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:11:58 crc kubenswrapper[4779]: I0929 19:11:58.432650 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:11:58 crc kubenswrapper[4779]: I0929 19:11:58.540441 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:11:59 crc kubenswrapper[4779]: I0929 19:11:59.069928 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:11:59 crc kubenswrapper[4779]: I0929 19:11:59.088934 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-22bm4" Sep 29 19:11:59 crc kubenswrapper[4779]: I0929 19:11:59.140270 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:12:00 crc kubenswrapper[4779]: I0929 19:12:00.061884 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xshk2"] Sep 29 19:12:00 crc kubenswrapper[4779]: I0929 19:12:00.497353 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xshk2" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="registry-server" containerID="cri-o://66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0" gracePeriod=2 Sep 29 19:12:00 crc kubenswrapper[4779]: I0929 19:12:00.551303 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:12:00 crc kubenswrapper[4779]: I0929 19:12:00.598734 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:12:00 crc kubenswrapper[4779]: I0929 19:12:00.972817 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.439156 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.470465 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22bm4"] Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.470739 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-22bm4" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="registry-server" containerID="cri-o://bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737" gracePeriod=2 Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.514971 4779 generic.go:334] "Generic (PLEG): container finished" podID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerID="66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0" exitCode=0 Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.515043 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xshk2" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.515101 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerDied","Data":"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0"} Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.515159 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xshk2" event={"ID":"a0becbd8-a5ce-42a3-9afe-137668fb98fa","Type":"ContainerDied","Data":"8a67cc0f4136797177f0160d747b83e388aa1576bf545e18b846dd5fe0607613"} Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.515182 4779 scope.go:117] "RemoveContainer" containerID="66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.553171 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4nts8" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.553689 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4nts8" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.567042 4779 scope.go:117] "RemoveContainer" containerID="22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.585717 4779 scope.go:117] "RemoveContainer" containerID="1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.595365 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4nts8" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.619546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities\") pod \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.619657 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcfz6\" (UniqueName: \"kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6\") pod \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " Sep 29 19:12:01 crc 
kubenswrapper[4779]: I0929 19:12:01.619718 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content\") pod \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\" (UID: \"a0becbd8-a5ce-42a3-9afe-137668fb98fa\") " Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.620328 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities" (OuterVolumeSpecName: "utilities") pod "a0becbd8-a5ce-42a3-9afe-137668fb98fa" (UID: "a0becbd8-a5ce-42a3-9afe-137668fb98fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.630587 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6" (OuterVolumeSpecName: "kube-api-access-wcfz6") pod "a0becbd8-a5ce-42a3-9afe-137668fb98fa" (UID: "a0becbd8-a5ce-42a3-9afe-137668fb98fa"). InnerVolumeSpecName "kube-api-access-wcfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.632896 4779 scope.go:117] "RemoveContainer" containerID="66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0" Sep 29 19:12:01 crc kubenswrapper[4779]: E0929 19:12:01.634599 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0\": container with ID starting with 66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0 not found: ID does not exist" containerID="66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.634687 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0"} err="failed to get container status \"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0\": rpc error: code = NotFound desc = could not find container \"66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0\": container with ID starting with 66e5b9f615bcfbbfdbf1e7cf8fed42905ad782c5c2b94a5055700ac4bf301dd0 not found: ID does not exist" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.634749 4779 scope.go:117] "RemoveContainer" containerID="22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9" Sep 29 19:12:01 crc kubenswrapper[4779]: E0929 19:12:01.640691 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9\": container with ID starting with 22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9 not found: ID does not exist" containerID="22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.640760 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9"} err="failed to get container status \"22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9\": rpc error: code = NotFound desc = could not find container 
\"22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9\": container with ID starting with 22ed9d81ea48f7e666fb4b360f0c6950a9412f8697ef0e60ee5a78604ee327a9 not found: ID does not exist" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.640798 4779 scope.go:117] "RemoveContainer" containerID="1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f" Sep 29 19:12:01 crc kubenswrapper[4779]: E0929 19:12:01.642613 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f\": container with ID starting with 1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f not found: ID does not exist" containerID="1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.642706 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f"} err="failed to get container status \"1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f\": rpc error: code = NotFound desc = could not find container \"1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f\": container with ID starting with 1815a78da9800fd70af7b6f95a157cb54b2aa46658209f68590f7cc62fefa31f not found: ID does not exist" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.688117 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0becbd8-a5ce-42a3-9afe-137668fb98fa" (UID: "a0becbd8-a5ce-42a3-9afe-137668fb98fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.721240 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.721266 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcfz6\" (UniqueName: \"kubernetes.io/projected/a0becbd8-a5ce-42a3-9afe-137668fb98fa-kube-api-access-wcfz6\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.721278 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0becbd8-a5ce-42a3-9afe-137668fb98fa-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.835878 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.846397 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xshk2"]
Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.848990 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xshk2"]
Sep 29 19:12:01 crc kubenswrapper[4779]: I0929 19:12:01.973757 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7k5wr"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.011382 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7k5wr"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.024721 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities\") pod \"68227c08-072e-4071-9206-2f34b9e9f1cf\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") "
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.024948 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw9kg\" (UniqueName: \"kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg\") pod \"68227c08-072e-4071-9206-2f34b9e9f1cf\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") "
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.024992 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content\") pod \"68227c08-072e-4071-9206-2f34b9e9f1cf\" (UID: \"68227c08-072e-4071-9206-2f34b9e9f1cf\") "
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.026050 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities" (OuterVolumeSpecName: "utilities") pod "68227c08-072e-4071-9206-2f34b9e9f1cf" (UID: "68227c08-072e-4071-9206-2f34b9e9f1cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.032566 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg" (OuterVolumeSpecName: "kube-api-access-dw9kg") pod "68227c08-072e-4071-9206-2f34b9e9f1cf" (UID: "68227c08-072e-4071-9206-2f34b9e9f1cf"). InnerVolumeSpecName "kube-api-access-dw9kg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.073046 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68227c08-072e-4071-9206-2f34b9e9f1cf" (UID: "68227c08-072e-4071-9206-2f34b9e9f1cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.126130 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.126165 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw9kg\" (UniqueName: \"kubernetes.io/projected/68227c08-072e-4071-9206-2f34b9e9f1cf-kube-api-access-dw9kg\") on node \"crc\" DevicePath \"\""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.126176 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68227c08-072e-4071-9206-2f34b9e9f1cf-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.523179 4779 generic.go:334] "Generic (PLEG): container finished" podID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerID="bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737" exitCode=0
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.523247 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-22bm4"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.523268 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerDied","Data":"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"}
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.523769 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bm4" event={"ID":"68227c08-072e-4071-9206-2f34b9e9f1cf","Type":"ContainerDied","Data":"f5e51fcb59d3966d3ceb7b3d5d556c5e49515af393b2460673adfb4fa7c9350c"}
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.523801 4779 scope.go:117] "RemoveContainer" containerID="bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.542483 4779 scope.go:117] "RemoveContainer" containerID="42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.561441 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22bm4"]
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.563815 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-22bm4"]
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.563881 4779 scope.go:117] "RemoveContainer" containerID="ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.585048 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4nts8"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.596799 4779 scope.go:117] "RemoveContainer" containerID="bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"
Sep 29 19:12:02 crc kubenswrapper[4779]: E0929 19:12:02.597385 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737\": container with ID starting with bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737 not found: ID does not exist" containerID="bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.597425 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737"} err="failed to get container status \"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737\": rpc error: code = NotFound desc = could not find container \"bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737\": container with ID starting with bc387b9993cbdea292d381d97e5dda44cabc3b85d7aba3d2aa9c1a76dad65737 not found: ID does not exist"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.597452 4779 scope.go:117] "RemoveContainer" containerID="42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f"
Sep 29 19:12:02 crc kubenswrapper[4779]: E0929 19:12:02.598171 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f\": container with ID starting with 42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f not found: ID does not exist" containerID="42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.598286 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f"} err="failed to get container status \"42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f\": rpc error: code = NotFound desc = could not find container \"42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f\": container with ID starting with 42b4cb6a6c58e1984c5f8630fc4c97fd1e964680b02e7240c2b6ec13a556f92f not found: ID does not exist"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.598375 4779 scope.go:117] "RemoveContainer" containerID="ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73"
Sep 29 19:12:02 crc kubenswrapper[4779]: E0929 19:12:02.599028 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73\": container with ID starting with ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73 not found: ID does not exist" containerID="ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73"
Sep 29 19:12:02 crc kubenswrapper[4779]: I0929 19:12:02.599062 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73"} err="failed to get container status \"ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73\": rpc error: code = NotFound desc = could not find container \"ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73\": container with ID starting with ed00058c3a36fbb809ca57fe749f10ba0e7ca8da205c8091882e7d2081477c73 not found: ID does not exist"
Sep 29 19:12:03 crc kubenswrapper[4779]: I0929 19:12:03.774203 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" path="/var/lib/kubelet/pods/68227c08-072e-4071-9206-2f34b9e9f1cf/volumes"
Sep 29 19:12:03 crc kubenswrapper[4779]: I0929 19:12:03.775186
4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" path="/var/lib/kubelet/pods/a0becbd8-a5ce-42a3-9afe-137668fb98fa/volumes" Sep 29 19:12:03 crc kubenswrapper[4779]: I0929 19:12:03.860270 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"] Sep 29 19:12:03 crc kubenswrapper[4779]: I0929 19:12:03.860773 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mkktm" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="registry-server" containerID="cri-o://3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f" gracePeriod=2 Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.186950 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.354975 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content\") pod \"9209519d-4c0e-4735-aa5d-731cdb7543a9\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.355051 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6cx5\" (UniqueName: \"kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5\") pod \"9209519d-4c0e-4735-aa5d-731cdb7543a9\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.355111 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities\") pod \"9209519d-4c0e-4735-aa5d-731cdb7543a9\" (UID: \"9209519d-4c0e-4735-aa5d-731cdb7543a9\") " Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.360542 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities" (OuterVolumeSpecName: "utilities") pod "9209519d-4c0e-4735-aa5d-731cdb7543a9" (UID: "9209519d-4c0e-4735-aa5d-731cdb7543a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.374893 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9209519d-4c0e-4735-aa5d-731cdb7543a9" (UID: "9209519d-4c0e-4735-aa5d-731cdb7543a9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.383108 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5" (OuterVolumeSpecName: "kube-api-access-t6cx5") pod "9209519d-4c0e-4735-aa5d-731cdb7543a9" (UID: "9209519d-4c0e-4735-aa5d-731cdb7543a9"). InnerVolumeSpecName "kube-api-access-t6cx5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.456021 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6cx5\" (UniqueName: \"kubernetes.io/projected/9209519d-4c0e-4735-aa5d-731cdb7543a9-kube-api-access-t6cx5\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.456065 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.456076 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9209519d-4c0e-4735-aa5d-731cdb7543a9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.537183 4779 generic.go:334] "Generic (PLEG): container finished" podID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerID="3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f" exitCode=0 Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.537248 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerDied","Data":"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f"} Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.537303 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkktm" event={"ID":"9209519d-4c0e-4735-aa5d-731cdb7543a9","Type":"ContainerDied","Data":"938dbf80cb5b4d813b54a1abbd826887d81eed6945989b3a9e9e7da0ef011865"} Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.537336 4779 scope.go:117] "RemoveContainer" containerID="3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.537628 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkktm" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.550973 4779 scope.go:117] "RemoveContainer" containerID="e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.565692 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"] Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.569394 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkktm"] Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.569604 4779 scope.go:117] "RemoveContainer" containerID="d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.603554 4779 scope.go:117] "RemoveContainer" containerID="3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f" Sep 29 19:12:04 crc kubenswrapper[4779]: E0929 19:12:04.603969 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f\": container with ID starting with 3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f not found: ID does not exist" containerID="3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.604016 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f"} err="failed to get container status \"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f\": rpc error: code = NotFound desc = could not find container \"3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f\": container with ID starting with 3125bec3559399f07ba0644715c7bfb2cc7489253cb66263fe0261e2a85ec01f not found: ID does not exist" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.604042 4779 scope.go:117] "RemoveContainer" containerID="e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c" Sep 29 19:12:04 crc kubenswrapper[4779]: E0929 19:12:04.604612 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c\": container with ID starting with e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c not found: ID does not exist" containerID="e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.604782 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c"} err="failed to get container status \"e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c\": rpc error: code = NotFound desc = could not find container \"e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c\": container with ID starting with e01abc43d3a75ea5da74cd1f64417c78e47d5662ad17017bc52c41781fbefa0c not found: ID does not exist" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.604947 4779 scope.go:117] "RemoveContainer" containerID="d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4" Sep 29 19:12:04 crc kubenswrapper[4779]: E0929 19:12:04.605497 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4\": container with ID starting with d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4 not found: ID does not exist" containerID="d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4" Sep 29 19:12:04 crc kubenswrapper[4779]: I0929 19:12:04.605655 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4"} err="failed to get container status \"d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4\": rpc error: code = NotFound desc = could not find container \"d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4\": container with ID starting with d3211d592c5b6774d70a0946ab95002dd1a3943f9685f3c2abda6ba8268bc1a4 not found: ID does not exist" Sep 29 19:12:05 crc kubenswrapper[4779]: I0929 19:12:05.773749 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" path="/var/lib/kubelet/pods/9209519d-4c0e-4735-aa5d-731cdb7543a9/volumes" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.262689 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"] Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.262930 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7k5wr" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="registry-server" containerID="cri-o://690f2500525c371ab67351244cd385f0a75588b0ea05117ea08058b45a134264" gracePeriod=2 Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.562263 4779 generic.go:334] "Generic (PLEG): container finished" podID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerID="690f2500525c371ab67351244cd385f0a75588b0ea05117ea08058b45a134264" exitCode=0 Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.562349 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerDied","Data":"690f2500525c371ab67351244cd385f0a75588b0ea05117ea08058b45a134264"} Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.562549 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7k5wr" event={"ID":"f89bc289-c3b4-4ca5-843b-b9b6ec104ede","Type":"ContainerDied","Data":"fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e"} Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.562565 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe927644b8b61292ac5f95da7594e4398eea57eef6dc130e6c866d25c41b863e" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.586491 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.683157 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content\") pod \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.683264 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities\") pod \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.683424 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgph4\" (UniqueName: \"kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4\") pod \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\" (UID: \"f89bc289-c3b4-4ca5-843b-b9b6ec104ede\") " Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.684313 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities" (OuterVolumeSpecName: "utilities") pod "f89bc289-c3b4-4ca5-843b-b9b6ec104ede" (UID: "f89bc289-c3b4-4ca5-843b-b9b6ec104ede"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.691166 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4" (OuterVolumeSpecName: "kube-api-access-cgph4") pod "f89bc289-c3b4-4ca5-843b-b9b6ec104ede" (UID: "f89bc289-c3b4-4ca5-843b-b9b6ec104ede"). InnerVolumeSpecName "kube-api-access-cgph4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.770566 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f89bc289-c3b4-4ca5-843b-b9b6ec104ede" (UID: "f89bc289-c3b4-4ca5-843b-b9b6ec104ede"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.785010 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.785049 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:06 crc kubenswrapper[4779]: I0929 19:12:06.785063 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgph4\" (UniqueName: \"kubernetes.io/projected/f89bc289-c3b4-4ca5-843b-b9b6ec104ede-kube-api-access-cgph4\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:07 crc kubenswrapper[4779]: I0929 19:12:07.566392 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7k5wr" Sep 29 19:12:07 crc kubenswrapper[4779]: I0929 19:12:07.600217 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"] Sep 29 19:12:07 crc kubenswrapper[4779]: I0929 19:12:07.603955 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7k5wr"] Sep 29 19:12:07 crc kubenswrapper[4779]: I0929 19:12:07.772857 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" path="/var/lib/kubelet/pods/f89bc289-c3b4-4ca5-843b-b9b6ec104ede/volumes" Sep 29 19:12:10 crc kubenswrapper[4779]: I0929 19:12:10.817894 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"] Sep 29 19:12:35 crc kubenswrapper[4779]: I0929 19:12:35.842537 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" containerName="oauth-openshift" containerID="cri-o://13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979" gracePeriod=15 Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.198235 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.241185 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-555fcf5468-lj5l4"] Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242015 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f188a3c4-156e-4269-87c4-48826a003674" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242039 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f188a3c4-156e-4269-87c4-48826a003674" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242051 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242059 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242074 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242082 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242094 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9413a4c1-e317-4da6-af34-34fd6beaddd0" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242100 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9413a4c1-e317-4da6-af34-34fd6beaddd0" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242113 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" containerName="oauth-openshift" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242120 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" 
containerName="oauth-openshift" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242128 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242136 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242146 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242153 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242161 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242168 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242179 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d999e79-a467-4f19-a67a-f5993c6b4423" containerName="collect-profiles" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242186 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d999e79-a467-4f19-a67a-f5993c6b4423" containerName="collect-profiles" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242197 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242204 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242213 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242221 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="extract-utilities" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242228 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242234 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242242 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242248 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242257 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242264 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242276 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242283 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.242294 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242302 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="extract-content" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242444 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9413a4c1-e317-4da6-af34-34fd6beaddd0" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242457 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d999e79-a467-4f19-a67a-f5993c6b4423" containerName="collect-profiles" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242467 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f89bc289-c3b4-4ca5-843b-b9b6ec104ede" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242474 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0becbd8-a5ce-42a3-9afe-137668fb98fa" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242481 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f188a3c4-156e-4269-87c4-48826a003674" containerName="pruner" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242488 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="68227c08-072e-4071-9206-2f34b9e9f1cf" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242497 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9209519d-4c0e-4735-aa5d-731cdb7543a9" containerName="registry-server" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242506 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" containerName="oauth-openshift" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.242832 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.254926 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-555fcf5468-lj5l4"] Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280685 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280741 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280774 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280794 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280819 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280840 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280862 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280898 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280923 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280943 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280967 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.280990 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281019 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9n87\" (UniqueName: \"kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281039 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir\") pod \"29b802ef-670a-46bb-9ad3-03bddd7dc682\" (UID: \"29b802ef-670a-46bb-9ad3-03bddd7dc682\") " Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281117 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-error\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281144 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-dir\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281180 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281200 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281222 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn6qv\" (UniqueName: \"kubernetes.io/projected/040007e5-696d-4ee5-afaf-c68e56718a1c-kube-api-access-dn6qv\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281252 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281276 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281297 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281357 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281382 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-login\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281403 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: 
\"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281440 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-policies\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281459 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281485 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-session\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.281841 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.282407 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.285514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.286881 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.287475 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.290581 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.293443 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.293827 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.298751 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.299007 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.301631 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.302424 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.305447 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87" (OuterVolumeSpecName: "kube-api-access-q9n87") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "kube-api-access-q9n87". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.305907 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "29b802ef-670a-46bb-9ad3-03bddd7dc682" (UID: "29b802ef-670a-46bb-9ad3-03bddd7dc682"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382435 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-policies\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382539 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-session\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382592 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-error\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382621 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-dir\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: 
\"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382663 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382688 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382711 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn6qv\" (UniqueName: \"kubernetes.io/projected/040007e5-696d-4ee5-afaf-c68e56718a1c-kube-api-access-dn6qv\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382745 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382770 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382796 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382835 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382859 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-login\") pod 
\"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382884 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382934 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382949 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382966 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382982 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382995 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383008 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383023 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383036 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9n87\" (UniqueName: \"kubernetes.io/projected/29b802ef-670a-46bb-9ad3-03bddd7dc682-kube-api-access-q9n87\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383048 4779 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383060 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-service-ca\") on node 
\"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383072 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383084 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383096 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383108 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29b802ef-670a-46bb-9ad3-03bddd7dc682-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383177 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-policies\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.382761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/040007e5-696d-4ee5-afaf-c68e56718a1c-audit-dir\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.383968 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.384481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.385148 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.386908 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.387965 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.388829 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.389064 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-session\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.389561 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-error\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.389614 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.390727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.393365 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/040007e5-696d-4ee5-afaf-c68e56718a1c-v4-0-config-user-template-login\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.410723 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn6qv\" (UniqueName: 
\"kubernetes.io/projected/040007e5-696d-4ee5-afaf-c68e56718a1c-kube-api-access-dn6qv\") pod \"oauth-openshift-555fcf5468-lj5l4\" (UID: \"040007e5-696d-4ee5-afaf-c68e56718a1c\") " pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.561991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.732934 4779 generic.go:334] "Generic (PLEG): container finished" podID="29b802ef-670a-46bb-9ad3-03bddd7dc682" containerID="13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979" exitCode=0 Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.733015 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" event={"ID":"29b802ef-670a-46bb-9ad3-03bddd7dc682","Type":"ContainerDied","Data":"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979"} Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.733132 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" event={"ID":"29b802ef-670a-46bb-9ad3-03bddd7dc682","Type":"ContainerDied","Data":"0bfb210ed5a26c042ab13a2be45b552c5c63cca89f2b16d9380f7e2f51bd54dc"} Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.733165 4779 scope.go:117] "RemoveContainer" containerID="13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.733415 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pjjch" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.757945 4779 scope.go:117] "RemoveContainer" containerID="13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979" Sep 29 19:12:36 crc kubenswrapper[4779]: E0929 19:12:36.760215 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979\": container with ID starting with 13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979 not found: ID does not exist" containerID="13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.760274 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979"} err="failed to get container status \"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979\": rpc error: code = NotFound desc = could not find container \"13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979\": container with ID starting with 13296d5bc1c28085d42c4d36090b3dc006a5af303010df3d36798f8679946979 not found: ID does not exist" Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.797067 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"] Sep 29 19:12:36 crc kubenswrapper[4779]: I0929 19:12:36.800684 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pjjch"] Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.040747 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-555fcf5468-lj5l4"] Sep 29 19:12:37 crc 
kubenswrapper[4779]: I0929 19:12:37.742999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" event={"ID":"040007e5-696d-4ee5-afaf-c68e56718a1c","Type":"ContainerStarted","Data":"2e1cab23478a916e82e51fe4a1d6888c36205b1e22dcf95d17a5e65eb2a14260"} Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.743088 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" event={"ID":"040007e5-696d-4ee5-afaf-c68e56718a1c","Type":"ContainerStarted","Data":"2a1d9168f9d95f39e69155419dfdefc8d0015ef29ca2dabdcf6cf5a7bf9c6c3f"} Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.743361 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.766427 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" podStartSLOduration=27.7664055 podStartE2EDuration="27.7664055s" podCreationTimestamp="2025-09-29 19:12:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:12:37.763825457 +0000 UTC m=+268.648250617" watchObservedRunningTime="2025-09-29 19:12:37.7664055 +0000 UTC m=+268.650830600" Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.791749 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b802ef-670a-46bb-9ad3-03bddd7dc682" path="/var/lib/kubelet/pods/29b802ef-670a-46bb-9ad3-03bddd7dc682/volumes" Sep 29 19:12:37 crc kubenswrapper[4779]: I0929 19:12:37.899797 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-555fcf5468-lj5l4" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.652697 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.653375 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rxml4" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="registry-server" containerID="cri-o://ffe9bf7d456cfc770cba37cf963bbaebf117efb7d01061fbd5f40e061c190c07" gracePeriod=30 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.655635 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.656034 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4b959" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="registry-server" containerID="cri-o://7dc6b099204da081e3fd6cc015737a917db7d93444374f97282895ab0ef89e8d" gracePeriod=30 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.661790 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.662003 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" containerID="cri-o://149337cc4c25d47ca0668834e7e6ec91e5e106613863c5b03a3e0a8b2f2d7ed3" gracePeriod=30 Sep 29 
19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.673763 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.674118 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8ncmv" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="registry-server" containerID="cri-o://1123e5bd8b660916fee65d2faee7d6ff0345ddfa9bd3e10d0e8cc9e4048efd77" gracePeriod=30 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.680185 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vcrlk"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.681741 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.692510 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.692729 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4nts8" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="registry-server" containerID="cri-o://0f5d45d2042f252e7caa75a1e1f5e74df23aeb7e6c0d7db3e2c7bc9d4eac333c" gracePeriod=30 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.703971 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vcrlk"] Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.729944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54tbp\" (UniqueName: \"kubernetes.io/projected/378c23da-08aa-4f09-9171-29a4f81908bb-kube-api-access-54tbp\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.729995 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.730124 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.830976 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.831082 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-54tbp\" (UniqueName: \"kubernetes.io/projected/378c23da-08aa-4f09-9171-29a4f81908bb-kube-api-access-54tbp\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.831117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.834010 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.836924 4779 generic.go:334] "Generic (PLEG): container finished" podID="37942c99-d186-454f-9d72-04ec19e7c737" containerID="1123e5bd8b660916fee65d2faee7d6ff0345ddfa9bd3e10d0e8cc9e4048efd77" exitCode=0 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.837010 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerDied","Data":"1123e5bd8b660916fee65d2faee7d6ff0345ddfa9bd3e10d0e8cc9e4048efd77"} Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.838783 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/378c23da-08aa-4f09-9171-29a4f81908bb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.839035 4779 generic.go:334] "Generic (PLEG): container finished" podID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerID="149337cc4c25d47ca0668834e7e6ec91e5e106613863c5b03a3e0a8b2f2d7ed3" exitCode=0 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.839131 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" event={"ID":"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e","Type":"ContainerDied","Data":"149337cc4c25d47ca0668834e7e6ec91e5e106613863c5b03a3e0a8b2f2d7ed3"} Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.841689 4779 generic.go:334] "Generic (PLEG): container finished" podID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerID="ffe9bf7d456cfc770cba37cf963bbaebf117efb7d01061fbd5f40e061c190c07" exitCode=0 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.841711 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerDied","Data":"ffe9bf7d456cfc770cba37cf963bbaebf117efb7d01061fbd5f40e061c190c07"} Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.851162 4779 generic.go:334] "Generic (PLEG): container finished" podID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" 
containerID="0f5d45d2042f252e7caa75a1e1f5e74df23aeb7e6c0d7db3e2c7bc9d4eac333c" exitCode=0 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.851241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerDied","Data":"0f5d45d2042f252e7caa75a1e1f5e74df23aeb7e6c0d7db3e2c7bc9d4eac333c"} Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.853937 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerID="7dc6b099204da081e3fd6cc015737a917db7d93444374f97282895ab0ef89e8d" exitCode=0 Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.853958 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerDied","Data":"7dc6b099204da081e3fd6cc015737a917db7d93444374f97282895ab0ef89e8d"} Sep 29 19:12:53 crc kubenswrapper[4779]: I0929 19:12:53.856012 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54tbp\" (UniqueName: \"kubernetes.io/projected/378c23da-08aa-4f09-9171-29a4f81908bb-kube-api-access-54tbp\") pod \"marketplace-operator-79b997595-vcrlk\" (UID: \"378c23da-08aa-4f09-9171-29a4f81908bb\") " pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.010404 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.077785 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.079609 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.093504 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.104163 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.134068 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nts8" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.135595 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca\") pod \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.137788 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" (UID: "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.138471 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities\") pod \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.138502 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content\") pod \"37942c99-d186-454f-9d72-04ec19e7c737\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.138611 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities\") pod \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.139024 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities\") pod \"37942c99-d186-454f-9d72-04ec19e7c737\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.139200 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hct62\" (UniqueName: \"kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62\") pod \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.139324 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities" (OuterVolumeSpecName: "utilities") pod "6cf06f8e-1088-4c70-809f-c4b1a55e96e5" (UID: "6cf06f8e-1088-4c70-809f-c4b1a55e96e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.139405 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content\") pod \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\" (UID: \"79af1e0e-dad6-413a-80ea-699ef7f5cdad\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.139966 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities" (OuterVolumeSpecName: "utilities") pod "79af1e0e-dad6-413a-80ea-699ef7f5cdad" (UID: "79af1e0e-dad6-413a-80ea-699ef7f5cdad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.143397 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities" (OuterVolumeSpecName: "utilities") pod "37942c99-d186-454f-9d72-04ec19e7c737" (UID: "37942c99-d186-454f-9d72-04ec19e7c737"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.145699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62" (OuterVolumeSpecName: "kube-api-access-hct62") pod "79af1e0e-dad6-413a-80ea-699ef7f5cdad" (UID: "79af1e0e-dad6-413a-80ea-699ef7f5cdad"). InnerVolumeSpecName "kube-api-access-hct62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.145865 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z" (OuterVolumeSpecName: "kube-api-access-5nd9z") pod "6cf06f8e-1088-4c70-809f-c4b1a55e96e5" (UID: "6cf06f8e-1088-4c70-809f-c4b1a55e96e5"). InnerVolumeSpecName "kube-api-access-5nd9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.146448 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nd9z\" (UniqueName: \"kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z\") pod \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.146514 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics\") pod \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.146539 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5h6j\" (UniqueName: \"kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j\") pod \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\" (UID: \"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.147032 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content\") pod \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\" (UID: \"6cf06f8e-1088-4c70-809f-c4b1a55e96e5\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.147399 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4jhx\" (UniqueName: \"kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx\") pod \"37942c99-d186-454f-9d72-04ec19e7c737\" (UID: \"37942c99-d186-454f-9d72-04ec19e7c737\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152298 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152365 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152440 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152456 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152471 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hct62\" (UniqueName: \"kubernetes.io/projected/79af1e0e-dad6-413a-80ea-699ef7f5cdad-kube-api-access-hct62\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.152512 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nd9z\" (UniqueName: \"kubernetes.io/projected/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-kube-api-access-5nd9z\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.153094 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j" (OuterVolumeSpecName: "kube-api-access-z5h6j") pod "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" (UID: "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e"). InnerVolumeSpecName "kube-api-access-z5h6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.156995 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37942c99-d186-454f-9d72-04ec19e7c737" (UID: "37942c99-d186-454f-9d72-04ec19e7c737"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.157802 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" (UID: "ec0b9d1c-6776-4434-9a56-756a3fc1fc5e"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.183836 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx" (OuterVolumeSpecName: "kube-api-access-d4jhx") pod "37942c99-d186-454f-9d72-04ec19e7c737" (UID: "37942c99-d186-454f-9d72-04ec19e7c737"). InnerVolumeSpecName "kube-api-access-d4jhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.232881 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6cf06f8e-1088-4c70-809f-c4b1a55e96e5" (UID: "6cf06f8e-1088-4c70-809f-c4b1a55e96e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.253782 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q565t\" (UniqueName: \"kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t\") pod \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.253865 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content\") pod \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.253932 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities\") pod \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\" (UID: \"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b\") " Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.254116 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4jhx\" (UniqueName: \"kubernetes.io/projected/37942c99-d186-454f-9d72-04ec19e7c737-kube-api-access-d4jhx\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.254128 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37942c99-d186-454f-9d72-04ec19e7c737-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.254137 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.254147 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5h6j\" (UniqueName: \"kubernetes.io/projected/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e-kube-api-access-z5h6j\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.254156 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cf06f8e-1088-4c70-809f-c4b1a55e96e5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.255483 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities" (OuterVolumeSpecName: "utilities") pod "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" (UID: "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.258675 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79af1e0e-dad6-413a-80ea-699ef7f5cdad" (UID: "79af1e0e-dad6-413a-80ea-699ef7f5cdad"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.259884 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vcrlk"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.263839 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t" (OuterVolumeSpecName: "kube-api-access-q565t") pod "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" (UID: "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b"). InnerVolumeSpecName "kube-api-access-q565t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: W0929 19:12:54.263859 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod378c23da_08aa_4f09_9171_29a4f81908bb.slice/crio-f464615f58e45713a4db586e30de67ba5a063ea20e188a512d747eb95e01cf91 WatchSource:0}: Error finding container f464615f58e45713a4db586e30de67ba5a063ea20e188a512d747eb95e01cf91: Status 404 returned error can't find the container with id f464615f58e45713a4db586e30de67ba5a063ea20e188a512d747eb95e01cf91 Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.338639 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" (UID: "cb7eba8f-ce89-4f64-a05a-d73d0c3b630b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.355998 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79af1e0e-dad6-413a-80ea-699ef7f5cdad-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.356020 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.356029 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.356038 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q565t\" (UniqueName: \"kubernetes.io/projected/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b-kube-api-access-q565t\") on node \"crc\" DevicePath \"\"" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.860718 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" event={"ID":"378c23da-08aa-4f09-9171-29a4f81908bb","Type":"ContainerStarted","Data":"432dca15ce42691e58604ea765f6e82b3a9c92be184eeac3e64c61ea32635a76"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.860769 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" event={"ID":"378c23da-08aa-4f09-9171-29a4f81908bb","Type":"ContainerStarted","Data":"f464615f58e45713a4db586e30de67ba5a063ea20e188a512d747eb95e01cf91"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.861512 4779 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.862543 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4nts8" event={"ID":"cb7eba8f-ce89-4f64-a05a-d73d0c3b630b","Type":"ContainerDied","Data":"4a46f3ee3c87c70006d0f1b37925c5413e7bc5c140d11c54612d53cb92b86a56"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.862571 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4nts8" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.862601 4779 scope.go:117] "RemoveContainer" containerID="0f5d45d2042f252e7caa75a1e1f5e74df23aeb7e6c0d7db3e2c7bc9d4eac333c" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.870226 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.872275 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b959" event={"ID":"6cf06f8e-1088-4c70-809f-c4b1a55e96e5","Type":"ContainerDied","Data":"0fe3ef18a5cdd947651af07fc4c43f15097b4472927aa691930d4b852f0df960"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.872394 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b959" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.876086 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ncmv" event={"ID":"37942c99-d186-454f-9d72-04ec19e7c737","Type":"ContainerDied","Data":"c8b5f599216a8cc889424d5e3a93b55d280a372b70df1b6b3b47f4b51e8a44e5"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.876183 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ncmv" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.884660 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" event={"ID":"ec0b9d1c-6776-4434-9a56-756a3fc1fc5e","Type":"ContainerDied","Data":"9fccf9ae097860af07f76a2699b7b305643752e55f7e5c371b1b49ba23e374fa"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.885012 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bt79h" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.885729 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vcrlk" podStartSLOduration=1.885711536 podStartE2EDuration="1.885711536s" podCreationTimestamp="2025-09-29 19:12:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:12:54.880868189 +0000 UTC m=+285.765293299" watchObservedRunningTime="2025-09-29 19:12:54.885711536 +0000 UTC m=+285.770136636" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.895244 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxml4" event={"ID":"79af1e0e-dad6-413a-80ea-699ef7f5cdad","Type":"ContainerDied","Data":"ea8e919eee625b008dc50d16ef0cf486bb78bbcc68e6a65733ec0db089e4a40d"} Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.895362 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxml4" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.907156 4779 scope.go:117] "RemoveContainer" containerID="256a02e00f3640e43b9b6cc0c3da0403991fbeee414822b496316ab3fb365a1c" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.935408 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.938898 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4nts8"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.939869 4779 scope.go:117] "RemoveContainer" containerID="6eded466b41152f2d69db3e6433962d9fbe46352f7623528f6921d9e3e72196d" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.948667 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.953040 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4b959"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.965463 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.972486 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ncmv"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.979036 4779 scope.go:117] "RemoveContainer" containerID="7dc6b099204da081e3fd6cc015737a917db7d93444374f97282895ab0ef89e8d" Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.982308 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.985080 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rxml4"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.987841 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.989889 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bt79h"] Sep 29 19:12:54 crc kubenswrapper[4779]: I0929 19:12:54.993521 4779 
scope.go:117] "RemoveContainer" containerID="856368aee0f6c0d45213fe87848ce2c0ac53cca9d92d1ac0cb5ed50efab57483" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.018615 4779 scope.go:117] "RemoveContainer" containerID="e8d205b24efa49a093f990a3e22bb5f7a278c80b4b7f3bc4b14ebd48cebd0ba6" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.035221 4779 scope.go:117] "RemoveContainer" containerID="1123e5bd8b660916fee65d2faee7d6ff0345ddfa9bd3e10d0e8cc9e4048efd77" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.053495 4779 scope.go:117] "RemoveContainer" containerID="cb08b414bc2257510dcc1ebf7f840b380fee9b30e0f680d8209c87b9e130fdef" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.066274 4779 scope.go:117] "RemoveContainer" containerID="3b0dfa255b289d6fbe2a4ae6a40261feb39a7c74bc8cddc737c3f637c81080f7" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.076934 4779 scope.go:117] "RemoveContainer" containerID="149337cc4c25d47ca0668834e7e6ec91e5e106613863c5b03a3e0a8b2f2d7ed3" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.089784 4779 scope.go:117] "RemoveContainer" containerID="ffe9bf7d456cfc770cba37cf963bbaebf117efb7d01061fbd5f40e061c190c07" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.102665 4779 scope.go:117] "RemoveContainer" containerID="12e503eda918eba14a51b7344a1c993a125d3ea878ab62bda95f69888d5561db" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.114855 4779 scope.go:117] "RemoveContainer" containerID="837d718292acb63772a0f7caee199e9ba5243f6e7c5cb6032e354690ae060368" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.773792 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37942c99-d186-454f-9d72-04ec19e7c737" path="/var/lib/kubelet/pods/37942c99-d186-454f-9d72-04ec19e7c737/volumes" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.774811 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" path="/var/lib/kubelet/pods/6cf06f8e-1088-4c70-809f-c4b1a55e96e5/volumes" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.775496 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" path="/var/lib/kubelet/pods/79af1e0e-dad6-413a-80ea-699ef7f5cdad/volumes" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.776656 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" path="/var/lib/kubelet/pods/cb7eba8f-ce89-4f64-a05a-d73d0c3b630b/volumes" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.777423 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" path="/var/lib/kubelet/pods/ec0b9d1c-6776-4434-9a56-756a3fc1fc5e/volumes" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860018 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6w68s"] Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860237 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860258 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860274 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37942c99-d186-454f-9d72-04ec19e7c737" 
containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860283 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860295 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860304 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860322 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860329 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860511 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860579 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860591 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860600 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860610 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860709 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860723 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860730 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860742 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860751 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860786 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860795 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860826 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860836 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860870 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860878 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="extract-utilities" Sep 29 19:12:55 crc kubenswrapper[4779]: E0929 19:12:55.860885 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.860892 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="extract-content" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861103 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="37942c99-d186-454f-9d72-04ec19e7c737" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861115 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="79af1e0e-dad6-413a-80ea-699ef7f5cdad" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861124 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb7eba8f-ce89-4f64-a05a-d73d0c3b630b" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861136 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec0b9d1c-6776-4434-9a56-756a3fc1fc5e" containerName="marketplace-operator" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861151 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cf06f8e-1088-4c70-809f-c4b1a55e96e5" containerName="registry-server" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.861927 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.863393 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.870197 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w68s"] Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.980519 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-utilities\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.980735 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhbf4\" (UniqueName: \"kubernetes.io/projected/664a3e86-9290-495f-9b9b-7b8fe5dc7177-kube-api-access-qhbf4\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:55 crc kubenswrapper[4779]: I0929 19:12:55.980796 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-catalog-content\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.056865 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hvfth"] Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.058020 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.060041 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.064779 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvfth"] Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.081604 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhbf4\" (UniqueName: \"kubernetes.io/projected/664a3e86-9290-495f-9b9b-7b8fe5dc7177-kube-api-access-qhbf4\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.081653 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-catalog-content\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.081719 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-utilities\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.082192 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-catalog-content\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.082228 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a3e86-9290-495f-9b9b-7b8fe5dc7177-utilities\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.097605 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhbf4\" (UniqueName: \"kubernetes.io/projected/664a3e86-9290-495f-9b9b-7b8fe5dc7177-kube-api-access-qhbf4\") pod \"redhat-marketplace-6w68s\" (UID: \"664a3e86-9290-495f-9b9b-7b8fe5dc7177\") " pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.183202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-utilities\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.183286 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj4zq\" (UniqueName: \"kubernetes.io/projected/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-kube-api-access-hj4zq\") pod \"certified-operators-hvfth\" (UID: 
\"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.183347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-catalog-content\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.189097 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.284623 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj4zq\" (UniqueName: \"kubernetes.io/projected/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-kube-api-access-hj4zq\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.284975 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-catalog-content\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.285879 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-catalog-content\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.285953 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-utilities\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.285047 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-utilities\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.303145 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj4zq\" (UniqueName: \"kubernetes.io/projected/9bd42e07-23d9-49a8-b02d-bd11be36fc0c-kube-api-access-hj4zq\") pod \"certified-operators-hvfth\" (UID: \"9bd42e07-23d9-49a8-b02d-bd11be36fc0c\") " pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.377328 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w68s"] Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.381439 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.766891 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvfth"] Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.908387 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerStarted","Data":"9cabc1346ff38797a006a6dcb6c8452c8a73507b272a43329a975fab4f7dc2ce"} Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.908427 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerStarted","Data":"258c7644082a0d1ca3e980ca89c2aed413d768db9c1a75a576c35ddb12e432e6"} Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.911914 4779 generic.go:334] "Generic (PLEG): container finished" podID="664a3e86-9290-495f-9b9b-7b8fe5dc7177" containerID="f49c6b6a3e89dbdb100305acf691d7d76b405ce07a28be339a0bdd62b8cb481a" exitCode=0 Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.911974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w68s" event={"ID":"664a3e86-9290-495f-9b9b-7b8fe5dc7177","Type":"ContainerDied","Data":"f49c6b6a3e89dbdb100305acf691d7d76b405ce07a28be339a0bdd62b8cb481a"} Sep 29 19:12:56 crc kubenswrapper[4779]: I0929 19:12:56.912017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w68s" event={"ID":"664a3e86-9290-495f-9b9b-7b8fe5dc7177","Type":"ContainerStarted","Data":"cbbac36d5822ea971b940bfdd856d9f87bad7ba7844c0516c6d9dd14992db855"} Sep 29 19:12:57 crc kubenswrapper[4779]: I0929 19:12:57.919355 4779 generic.go:334] "Generic (PLEG): container finished" podID="9bd42e07-23d9-49a8-b02d-bd11be36fc0c" containerID="9cabc1346ff38797a006a6dcb6c8452c8a73507b272a43329a975fab4f7dc2ce" exitCode=0 Sep 29 19:12:57 crc kubenswrapper[4779]: I0929 19:12:57.919538 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerDied","Data":"9cabc1346ff38797a006a6dcb6c8452c8a73507b272a43329a975fab4f7dc2ce"} Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.263217 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9v764"] Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.266789 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.269518 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9v764"] Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.272741 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.309455 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-catalog-content\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.309497 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-utilities\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.309561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp4t8\" (UniqueName: \"kubernetes.io/projected/23a92b93-fce1-410f-b088-394235c8d3b8-kube-api-access-zp4t8\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.410699 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-catalog-content\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.410770 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-utilities\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.410909 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp4t8\" (UniqueName: \"kubernetes.io/projected/23a92b93-fce1-410f-b088-394235c8d3b8-kube-api-access-zp4t8\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.411631 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-utilities\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.411650 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23a92b93-fce1-410f-b088-394235c8d3b8-catalog-content\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " 
pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.436626 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp4t8\" (UniqueName: \"kubernetes.io/projected/23a92b93-fce1-410f-b088-394235c8d3b8-kube-api-access-zp4t8\") pod \"redhat-operators-9v764\" (UID: \"23a92b93-fce1-410f-b088-394235c8d3b8\") " pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.467569 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rtkr2"] Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.470165 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.472489 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.476941 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtkr2"] Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.512068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-utilities\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.512112 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-catalog-content\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.512160 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnmgj\" (UniqueName: \"kubernetes.io/projected/97741d17-ab9c-415a-b310-38e8c914d91a-kube-api-access-qnmgj\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.613479 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnmgj\" (UniqueName: \"kubernetes.io/projected/97741d17-ab9c-415a-b310-38e8c914d91a-kube-api-access-qnmgj\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.613588 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-catalog-content\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.613614 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-utilities\") pod \"community-operators-rtkr2\" (UID: 
\"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.614080 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-catalog-content\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.614112 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97741d17-ab9c-415a-b310-38e8c914d91a-utilities\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.624610 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.632389 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnmgj\" (UniqueName: \"kubernetes.io/projected/97741d17-ab9c-415a-b310-38e8c914d91a-kube-api-access-qnmgj\") pod \"community-operators-rtkr2\" (UID: \"97741d17-ab9c-415a-b310-38e8c914d91a\") " pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.798544 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.934996 4779 generic.go:334] "Generic (PLEG): container finished" podID="664a3e86-9290-495f-9b9b-7b8fe5dc7177" containerID="2aaeb5a045ed2308dce3a5fa47fe76cab0e70375d2d30ffa53d409657eb60bff" exitCode=0 Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.935084 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w68s" event={"ID":"664a3e86-9290-495f-9b9b-7b8fe5dc7177","Type":"ContainerDied","Data":"2aaeb5a045ed2308dce3a5fa47fe76cab0e70375d2d30ffa53d409657eb60bff"} Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.940436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerStarted","Data":"54e7e9b273cf21acd5d08368f13588ba615d56d7b1cdb158ab75c3d49ab89d57"} Sep 29 19:12:58 crc kubenswrapper[4779]: I0929 19:12:58.987995 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtkr2"] Sep 29 19:12:59 crc kubenswrapper[4779]: W0929 19:12:59.009552 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97741d17_ab9c_415a_b310_38e8c914d91a.slice/crio-9ef3b2c2d25aeaa5ab3609b2e6f6f0f82ba9e3ce31f9975b8ff321c43c3e9bc7 WatchSource:0}: Error finding container 9ef3b2c2d25aeaa5ab3609b2e6f6f0f82ba9e3ce31f9975b8ff321c43c3e9bc7: Status 404 returned error can't find the container with id 9ef3b2c2d25aeaa5ab3609b2e6f6f0f82ba9e3ce31f9975b8ff321c43c3e9bc7 Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.037588 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9v764"] Sep 29 19:12:59 crc kubenswrapper[4779]: W0929 19:12:59.041702 4779 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23a92b93_fce1_410f_b088_394235c8d3b8.slice/crio-eeb9cca05bf55c82f5018363bf1a86f39857e364f073755974179f3fc2fddc82 WatchSource:0}: Error finding container eeb9cca05bf55c82f5018363bf1a86f39857e364f073755974179f3fc2fddc82: Status 404 returned error can't find the container with id eeb9cca05bf55c82f5018363bf1a86f39857e364f073755974179f3fc2fddc82 Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.949660 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w68s" event={"ID":"664a3e86-9290-495f-9b9b-7b8fe5dc7177","Type":"ContainerStarted","Data":"98c9b554ad201ab54b65e91a749c603deaad8c9857d040d314bdf1148aa13cd7"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.951386 4779 generic.go:334] "Generic (PLEG): container finished" podID="97741d17-ab9c-415a-b310-38e8c914d91a" containerID="56cf92153d0b68c1f7427f00de99a9f1733a1d4af841114a23ef8ed6a475fc47" exitCode=0 Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.951465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtkr2" event={"ID":"97741d17-ab9c-415a-b310-38e8c914d91a","Type":"ContainerDied","Data":"56cf92153d0b68c1f7427f00de99a9f1733a1d4af841114a23ef8ed6a475fc47"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.951516 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtkr2" event={"ID":"97741d17-ab9c-415a-b310-38e8c914d91a","Type":"ContainerStarted","Data":"9ef3b2c2d25aeaa5ab3609b2e6f6f0f82ba9e3ce31f9975b8ff321c43c3e9bc7"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.957518 4779 generic.go:334] "Generic (PLEG): container finished" podID="23a92b93-fce1-410f-b088-394235c8d3b8" containerID="71f23bdf594d38e4890b5b25d84f9d7c2053942d72577a77effed9f6c5b18537" exitCode=0 Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.957591 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9v764" event={"ID":"23a92b93-fce1-410f-b088-394235c8d3b8","Type":"ContainerDied","Data":"71f23bdf594d38e4890b5b25d84f9d7c2053942d72577a77effed9f6c5b18537"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.957612 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9v764" event={"ID":"23a92b93-fce1-410f-b088-394235c8d3b8","Type":"ContainerStarted","Data":"eeb9cca05bf55c82f5018363bf1a86f39857e364f073755974179f3fc2fddc82"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.960215 4779 generic.go:334] "Generic (PLEG): container finished" podID="9bd42e07-23d9-49a8-b02d-bd11be36fc0c" containerID="54e7e9b273cf21acd5d08368f13588ba615d56d7b1cdb158ab75c3d49ab89d57" exitCode=0 Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.960263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerDied","Data":"54e7e9b273cf21acd5d08368f13588ba615d56d7b1cdb158ab75c3d49ab89d57"} Sep 29 19:12:59 crc kubenswrapper[4779]: I0929 19:12:59.969616 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6w68s" podStartSLOduration=2.53668241 podStartE2EDuration="4.969593878s" podCreationTimestamp="2025-09-29 19:12:55 +0000 UTC" firstStartedPulling="2025-09-29 19:12:56.913280479 +0000 UTC m=+287.797705569" lastFinishedPulling="2025-09-29 
19:12:59.346191937 +0000 UTC m=+290.230617037" observedRunningTime="2025-09-29 19:12:59.968129012 +0000 UTC m=+290.852554152" watchObservedRunningTime="2025-09-29 19:12:59.969593878 +0000 UTC m=+290.854018978" Sep 29 19:13:02 crc kubenswrapper[4779]: I0929 19:13:02.982756 4779 generic.go:334] "Generic (PLEG): container finished" podID="23a92b93-fce1-410f-b088-394235c8d3b8" containerID="66618ca7f9dbce38af8ff121a3def63515e828e9a9e8cab03440bd266cad7408" exitCode=0 Sep 29 19:13:02 crc kubenswrapper[4779]: I0929 19:13:02.982963 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9v764" event={"ID":"23a92b93-fce1-410f-b088-394235c8d3b8","Type":"ContainerDied","Data":"66618ca7f9dbce38af8ff121a3def63515e828e9a9e8cab03440bd266cad7408"} Sep 29 19:13:02 crc kubenswrapper[4779]: I0929 19:13:02.989033 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvfth" event={"ID":"9bd42e07-23d9-49a8-b02d-bd11be36fc0c","Type":"ContainerStarted","Data":"83f97aae5005632d3a0a03b4e4da1e8540e00c47f230c6089282a8cc42fd784d"} Sep 29 19:13:02 crc kubenswrapper[4779]: I0929 19:13:02.993678 4779 generic.go:334] "Generic (PLEG): container finished" podID="97741d17-ab9c-415a-b310-38e8c914d91a" containerID="6828b5b58971a97fe63a4eb65dadee783a878ca26a3fb1601f617aba2e58c8d6" exitCode=0 Sep 29 19:13:02 crc kubenswrapper[4779]: I0929 19:13:02.993742 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtkr2" event={"ID":"97741d17-ab9c-415a-b310-38e8c914d91a","Type":"ContainerDied","Data":"6828b5b58971a97fe63a4eb65dadee783a878ca26a3fb1601f617aba2e58c8d6"} Sep 29 19:13:03 crc kubenswrapper[4779]: I0929 19:13:03.036479 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hvfth" podStartSLOduration=4.243249981 podStartE2EDuration="7.03639316s" podCreationTimestamp="2025-09-29 19:12:56 +0000 UTC" firstStartedPulling="2025-09-29 19:12:57.921434305 +0000 UTC m=+288.805859405" lastFinishedPulling="2025-09-29 19:13:00.714577484 +0000 UTC m=+291.599002584" observedRunningTime="2025-09-29 19:13:03.012027689 +0000 UTC m=+293.896452879" watchObservedRunningTime="2025-09-29 19:13:03.03639316 +0000 UTC m=+293.920818300" Sep 29 19:13:04 crc kubenswrapper[4779]: I0929 19:13:04.000856 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9v764" event={"ID":"23a92b93-fce1-410f-b088-394235c8d3b8","Type":"ContainerStarted","Data":"0f120306c3784da6566f6820a8c38b91ddf456cd7679552afa915deee5639cbb"} Sep 29 19:13:04 crc kubenswrapper[4779]: I0929 19:13:04.004114 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtkr2" event={"ID":"97741d17-ab9c-415a-b310-38e8c914d91a","Type":"ContainerStarted","Data":"df0fbec92bf03ff99476822a6c3a73be6aeb5cc2dca80a40272d2e3813a7b0fa"} Sep 29 19:13:04 crc kubenswrapper[4779]: I0929 19:13:04.027515 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9v764" podStartSLOduration=2.522310924 podStartE2EDuration="6.027499942s" podCreationTimestamp="2025-09-29 19:12:58 +0000 UTC" firstStartedPulling="2025-09-29 19:12:59.958678773 +0000 UTC m=+290.843103873" lastFinishedPulling="2025-09-29 19:13:03.463867781 +0000 UTC m=+294.348292891" observedRunningTime="2025-09-29 19:13:04.025565576 +0000 UTC m=+294.909990676" watchObservedRunningTime="2025-09-29 
19:13:04.027499942 +0000 UTC m=+294.911925042" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.190140 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.190186 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.246819 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.267468 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rtkr2" podStartSLOduration=4.648688063 podStartE2EDuration="8.267445854s" podCreationTimestamp="2025-09-29 19:12:58 +0000 UTC" firstStartedPulling="2025-09-29 19:12:59.95444924 +0000 UTC m=+290.838874360" lastFinishedPulling="2025-09-29 19:13:03.573207051 +0000 UTC m=+294.457632151" observedRunningTime="2025-09-29 19:13:04.043261164 +0000 UTC m=+294.927686274" watchObservedRunningTime="2025-09-29 19:13:06.267445854 +0000 UTC m=+297.151870984" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.382709 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.382981 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:13:06 crc kubenswrapper[4779]: I0929 19:13:06.441089 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:13:07 crc kubenswrapper[4779]: I0929 19:13:07.058499 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6w68s" Sep 29 19:13:07 crc kubenswrapper[4779]: I0929 19:13:07.080106 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hvfth" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.624852 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.625646 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.675986 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.799569 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.799645 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:13:08 crc kubenswrapper[4779]: I0929 19:13:08.851162 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:13:09 crc kubenswrapper[4779]: I0929 19:13:09.095811 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rtkr2" Sep 29 19:13:09 crc 
kubenswrapper[4779]: I0929 19:13:09.099768 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9v764" Sep 29 19:14:13 crc kubenswrapper[4779]: I0929 19:14:13.785524 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:14:13 crc kubenswrapper[4779]: I0929 19:14:13.786236 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:14:43 crc kubenswrapper[4779]: I0929 19:14:43.785285 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:14:43 crc kubenswrapper[4779]: I0929 19:14:43.785811 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.148308 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn"] Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.149907 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.153944 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.154779 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.163232 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn"] Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.171566 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.171878 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.171943 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztsxh\" (UniqueName: \"kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.272636 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.272753 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.272792 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztsxh\" (UniqueName: \"kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.274219 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume\") pod 
\"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.284945 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.291602 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztsxh\" (UniqueName: \"kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh\") pod \"collect-profiles-29319555-8fndn\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.478972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:00 crc kubenswrapper[4779]: I0929 19:15:00.763145 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn"] Sep 29 19:15:01 crc kubenswrapper[4779]: I0929 19:15:01.747456 4779 generic.go:334] "Generic (PLEG): container finished" podID="18a164cd-10b2-4913-87b3-8ee84cc1a8b4" containerID="34d910cb77404a1b7e311671dd5eccb66f82340b0742d59394ed6284949e0cdb" exitCode=0 Sep 29 19:15:01 crc kubenswrapper[4779]: I0929 19:15:01.747532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" event={"ID":"18a164cd-10b2-4913-87b3-8ee84cc1a8b4","Type":"ContainerDied","Data":"34d910cb77404a1b7e311671dd5eccb66f82340b0742d59394ed6284949e0cdb"} Sep 29 19:15:01 crc kubenswrapper[4779]: I0929 19:15:01.748683 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" event={"ID":"18a164cd-10b2-4913-87b3-8ee84cc1a8b4","Type":"ContainerStarted","Data":"85d71f36fc4e83cefb2b05477c596deb36c23b11a2e68847ded2a938b60e44bc"} Sep 29 19:15:02 crc kubenswrapper[4779]: I0929 19:15:02.968286 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.009255 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume\") pod \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.009296 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume\") pod \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.009369 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztsxh\" (UniqueName: \"kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh\") pod \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\" (UID: \"18a164cd-10b2-4913-87b3-8ee84cc1a8b4\") " Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.009781 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume" (OuterVolumeSpecName: "config-volume") pod "18a164cd-10b2-4913-87b3-8ee84cc1a8b4" (UID: "18a164cd-10b2-4913-87b3-8ee84cc1a8b4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.017030 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "18a164cd-10b2-4913-87b3-8ee84cc1a8b4" (UID: "18a164cd-10b2-4913-87b3-8ee84cc1a8b4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.021597 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh" (OuterVolumeSpecName: "kube-api-access-ztsxh") pod "18a164cd-10b2-4913-87b3-8ee84cc1a8b4" (UID: "18a164cd-10b2-4913-87b3-8ee84cc1a8b4"). InnerVolumeSpecName "kube-api-access-ztsxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.110675 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztsxh\" (UniqueName: \"kubernetes.io/projected/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-kube-api-access-ztsxh\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.110731 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.110749 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/18a164cd-10b2-4913-87b3-8ee84cc1a8b4-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.764020 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" event={"ID":"18a164cd-10b2-4913-87b3-8ee84cc1a8b4","Type":"ContainerDied","Data":"85d71f36fc4e83cefb2b05477c596deb36c23b11a2e68847ded2a938b60e44bc"} Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.764077 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85d71f36fc4e83cefb2b05477c596deb36c23b11a2e68847ded2a938b60e44bc" Sep 29 19:15:03 crc kubenswrapper[4779]: I0929 19:15:03.764105 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn" Sep 29 19:15:13 crc kubenswrapper[4779]: I0929 19:15:13.785477 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:15:13 crc kubenswrapper[4779]: I0929 19:15:13.787536 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:15:13 crc kubenswrapper[4779]: I0929 19:15:13.787670 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:15:13 crc kubenswrapper[4779]: I0929 19:15:13.788772 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:15:13 crc kubenswrapper[4779]: I0929 19:15:13.788921 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72" gracePeriod=600 Sep 29 19:15:14 crc kubenswrapper[4779]: I0929 19:15:14.841987 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72" exitCode=0 Sep 29 19:15:14 crc kubenswrapper[4779]: I0929 19:15:14.842132 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72"} Sep 29 19:15:14 crc kubenswrapper[4779]: I0929 19:15:14.842549 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334"} Sep 29 19:15:14 crc kubenswrapper[4779]: I0929 19:15:14.842591 4779 scope.go:117] "RemoveContainer" containerID="7168ecfffda2ef4f7782650466ccebb8abcff3293fcd33d44eb4ee24abf6339c" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.560177 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kmjhv"] Sep 29 19:15:34 crc kubenswrapper[4779]: E0929 19:15:34.561070 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a164cd-10b2-4913-87b3-8ee84cc1a8b4" containerName="collect-profiles" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.561093 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a164cd-10b2-4913-87b3-8ee84cc1a8b4" containerName="collect-profiles" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.561295 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="18a164cd-10b2-4913-87b3-8ee84cc1a8b4" containerName="collect-profiles" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.561881 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.574245 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kmjhv"] Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742296 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e280161-1bfa-47a6-8167-c0c6180ae675-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742355 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-tls\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742372 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-bound-sa-token\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742394 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-certificates\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742606 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e280161-1bfa-47a6-8167-c0c6180ae675-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742635 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pkgg\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-kube-api-access-7pkgg\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.742672 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-trusted-ca\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.776251 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844186 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e280161-1bfa-47a6-8167-c0c6180ae675-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844257 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pkgg\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-kube-api-access-7pkgg\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844308 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-trusted-ca\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844414 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e280161-1bfa-47a6-8167-c0c6180ae675-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844450 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-tls\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844506 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-bound-sa-token\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.844575 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-certificates\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.846227 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e280161-1bfa-47a6-8167-c0c6180ae675-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.847126 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-trusted-ca\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.847472 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-certificates\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.856455 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e280161-1bfa-47a6-8167-c0c6180ae675-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.858112 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-registry-tls\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.864962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-bound-sa-token\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.874442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pkgg\" (UniqueName: \"kubernetes.io/projected/7e280161-1bfa-47a6-8167-c0c6180ae675-kube-api-access-7pkgg\") pod \"image-registry-66df7c8f76-kmjhv\" (UID: \"7e280161-1bfa-47a6-8167-c0c6180ae675\") " pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:34 crc kubenswrapper[4779]: I0929 19:15:34.881527 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:35 crc kubenswrapper[4779]: I0929 19:15:35.161463 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kmjhv"] Sep 29 19:15:35 crc kubenswrapper[4779]: W0929 19:15:35.170473 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e280161_1bfa_47a6_8167_c0c6180ae675.slice/crio-ccb81c65fc8e029db371af4524aa88c4b0cfff0245ff6f0d3bda4c1d99d71594 WatchSource:0}: Error finding container ccb81c65fc8e029db371af4524aa88c4b0cfff0245ff6f0d3bda4c1d99d71594: Status 404 returned error can't find the container with id ccb81c65fc8e029db371af4524aa88c4b0cfff0245ff6f0d3bda4c1d99d71594 Sep 29 19:15:35 crc kubenswrapper[4779]: I0929 19:15:35.983659 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" event={"ID":"7e280161-1bfa-47a6-8167-c0c6180ae675","Type":"ContainerStarted","Data":"f6930fb7dfae4e77512c79123ace3c84aabf07170c3b6e2b9a1f5a1017e1f91e"} Sep 29 19:15:35 crc kubenswrapper[4779]: I0929 19:15:35.983755 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" event={"ID":"7e280161-1bfa-47a6-8167-c0c6180ae675","Type":"ContainerStarted","Data":"ccb81c65fc8e029db371af4524aa88c4b0cfff0245ff6f0d3bda4c1d99d71594"} Sep 29 19:15:35 crc kubenswrapper[4779]: I0929 19:15:35.983950 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:36 crc kubenswrapper[4779]: I0929 19:15:36.017407 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" podStartSLOduration=2.017311731 podStartE2EDuration="2.017311731s" podCreationTimestamp="2025-09-29 19:15:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:15:36.012038129 +0000 UTC m=+446.896463299" watchObservedRunningTime="2025-09-29 19:15:36.017311731 +0000 UTC m=+446.901736871" Sep 29 19:15:54 crc kubenswrapper[4779]: I0929 19:15:54.887817 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-kmjhv" Sep 29 19:15:54 crc kubenswrapper[4779]: I0929 19:15:54.947634 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"] Sep 29 19:16:19 crc kubenswrapper[4779]: I0929 19:16:19.991978 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" containerName="registry" containerID="cri-o://95f76bd6186ac2fed5ac2348b447a6c5d7e2d25804c23aef88a60cce7e66d933" gracePeriod=30 Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.293016 4779 generic.go:334] "Generic (PLEG): container finished" podID="08924ea4-79d3-439f-8bdb-150f807221d9" containerID="95f76bd6186ac2fed5ac2348b447a6c5d7e2d25804c23aef88a60cce7e66d933" exitCode=0 Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.293079 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" 
event={"ID":"08924ea4-79d3-439f-8bdb-150f807221d9","Type":"ContainerDied","Data":"95f76bd6186ac2fed5ac2348b447a6c5d7e2d25804c23aef88a60cce7e66d933"} Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.515903 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.626899 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627264 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627435 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627470 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627547 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627576 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-428jl\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.627627 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca\") pod \"08924ea4-79d3-439f-8bdb-150f807221d9\" (UID: \"08924ea4-79d3-439f-8bdb-150f807221d9\") " Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.628710 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: 
"08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.629190 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.636185 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.639310 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.640571 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.643867 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.646825 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.647284 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl" (OuterVolumeSpecName: "kube-api-access-428jl") pod "08924ea4-79d3-439f-8bdb-150f807221d9" (UID: "08924ea4-79d3-439f-8bdb-150f807221d9"). InnerVolumeSpecName "kube-api-access-428jl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729444 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729513 4779 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/08924ea4-79d3-439f-8bdb-150f807221d9-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729536 4779 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729555 4779 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/08924ea4-79d3-439f-8bdb-150f807221d9-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729579 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729599 4779 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/08924ea4-79d3-439f-8bdb-150f807221d9-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:20 crc kubenswrapper[4779]: I0929 19:16:20.729617 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-428jl\" (UniqueName: \"kubernetes.io/projected/08924ea4-79d3-439f-8bdb-150f807221d9-kube-api-access-428jl\") on node \"crc\" DevicePath \"\"" Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.301804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" event={"ID":"08924ea4-79d3-439f-8bdb-150f807221d9","Type":"ContainerDied","Data":"53168aa818ef5ef7020735fe334d54694ddb2a69e03946cd787b8d01873ad074"} Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.301877 4779 scope.go:117] "RemoveContainer" containerID="95f76bd6186ac2fed5ac2348b447a6c5d7e2d25804c23aef88a60cce7e66d933" Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.301936 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.389157 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"] Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.396824 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tgwxw"] Sep 29 19:16:21 crc kubenswrapper[4779]: I0929 19:16:21.778116 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" path="/var/lib/kubelet/pods/08924ea4-79d3-439f-8bdb-150f807221d9/volumes" Sep 29 19:16:25 crc kubenswrapper[4779]: I0929 19:16:25.374287 4779 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-tgwxw container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.30:5000/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 19:16:25 crc kubenswrapper[4779]: I0929 19:16:25.374698 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-tgwxw" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.30:5000/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 19:17:09 crc kubenswrapper[4779]: I0929 19:17:09.912241 4779 scope.go:117] "RemoveContainer" containerID="e6d3d37723ad7c470644352d54d285f9bf42946e93f0a87358a40a38ebfae26a" Sep 29 19:17:43 crc kubenswrapper[4779]: I0929 19:17:43.784741 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:17:43 crc kubenswrapper[4779]: I0929 19:17:43.785208 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.749160 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zpt2c"] Sep 29 19:18:08 crc kubenswrapper[4779]: E0929 19:18:08.750064 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" containerName="registry" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.750083 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" containerName="registry" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.750548 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="08924ea4-79d3-439f-8bdb-150f807221d9" containerName="registry" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.751025 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.754910 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.754947 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.755554 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-zp464" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.765782 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zpt2c"] Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.772049 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g5jq2"] Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.772634 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g5jq2" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.777975 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-brj94" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.779488 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bf8nn"] Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.783718 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.788476 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bf8nn"] Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.788797 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-b9tpr" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.797830 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g5jq2"] Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.845367 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz65c\" (UniqueName: \"kubernetes.io/projected/d0e20784-de41-4e9c-8c95-f047e75f30fd-kube-api-access-sz65c\") pod \"cert-manager-cainjector-7f985d654d-zpt2c\" (UID: \"d0e20784-de41-4e9c-8c95-f047e75f30fd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.946987 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz65c\" (UniqueName: \"kubernetes.io/projected/d0e20784-de41-4e9c-8c95-f047e75f30fd-kube-api-access-sz65c\") pod \"cert-manager-cainjector-7f985d654d-zpt2c\" (UID: \"d0e20784-de41-4e9c-8c95-f047e75f30fd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.947128 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6n85\" (UniqueName: \"kubernetes.io/projected/3566f49d-2c94-40d8-b5b1-aa51cc7c043b-kube-api-access-l6n85\") pod \"cert-manager-webhook-5655c58dd6-bf8nn\" (UID: \"3566f49d-2c94-40d8-b5b1-aa51cc7c043b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 
19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.947306 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9td72\" (UniqueName: \"kubernetes.io/projected/599cf28e-c7ed-4c1a-a84d-ae90ec0708ba-kube-api-access-9td72\") pod \"cert-manager-5b446d88c5-g5jq2\" (UID: \"599cf28e-c7ed-4c1a-a84d-ae90ec0708ba\") " pod="cert-manager/cert-manager-5b446d88c5-g5jq2" Sep 29 19:18:08 crc kubenswrapper[4779]: I0929 19:18:08.970241 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz65c\" (UniqueName: \"kubernetes.io/projected/d0e20784-de41-4e9c-8c95-f047e75f30fd-kube-api-access-sz65c\") pod \"cert-manager-cainjector-7f985d654d-zpt2c\" (UID: \"d0e20784-de41-4e9c-8c95-f047e75f30fd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.049103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6n85\" (UniqueName: \"kubernetes.io/projected/3566f49d-2c94-40d8-b5b1-aa51cc7c043b-kube-api-access-l6n85\") pod \"cert-manager-webhook-5655c58dd6-bf8nn\" (UID: \"3566f49d-2c94-40d8-b5b1-aa51cc7c043b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.049181 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9td72\" (UniqueName: \"kubernetes.io/projected/599cf28e-c7ed-4c1a-a84d-ae90ec0708ba-kube-api-access-9td72\") pod \"cert-manager-5b446d88c5-g5jq2\" (UID: \"599cf28e-c7ed-4c1a-a84d-ae90ec0708ba\") " pod="cert-manager/cert-manager-5b446d88c5-g5jq2" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.068451 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6n85\" (UniqueName: \"kubernetes.io/projected/3566f49d-2c94-40d8-b5b1-aa51cc7c043b-kube-api-access-l6n85\") pod \"cert-manager-webhook-5655c58dd6-bf8nn\" (UID: \"3566f49d-2c94-40d8-b5b1-aa51cc7c043b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.070436 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.071618 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9td72\" (UniqueName: \"kubernetes.io/projected/599cf28e-c7ed-4c1a-a84d-ae90ec0708ba-kube-api-access-9td72\") pod \"cert-manager-5b446d88c5-g5jq2\" (UID: \"599cf28e-c7ed-4c1a-a84d-ae90ec0708ba\") " pod="cert-manager/cert-manager-5b446d88c5-g5jq2" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.098145 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g5jq2" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.108310 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.376988 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bf8nn"] Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.384765 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.565579 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zpt2c"] Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.573924 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g5jq2"] Sep 29 19:18:09 crc kubenswrapper[4779]: W0929 19:18:09.582005 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0e20784_de41_4e9c_8c95_f047e75f30fd.slice/crio-3ba9ad401b3e8c87f81eb71b7ac7d2013679c4a92ac54accf954cc60d0db044d WatchSource:0}: Error finding container 3ba9ad401b3e8c87f81eb71b7ac7d2013679c4a92ac54accf954cc60d0db044d: Status 404 returned error can't find the container with id 3ba9ad401b3e8c87f81eb71b7ac7d2013679c4a92ac54accf954cc60d0db044d Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.965399 4779 scope.go:117] "RemoveContainer" containerID="690f2500525c371ab67351244cd385f0a75588b0ea05117ea08058b45a134264" Sep 29 19:18:09 crc kubenswrapper[4779]: I0929 19:18:09.994903 4779 scope.go:117] "RemoveContainer" containerID="35faa773a5e4556cba6afa8832d1d2f96261f5231bb604c4a467ad860efa2f85" Sep 29 19:18:10 crc kubenswrapper[4779]: I0929 19:18:10.026289 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" event={"ID":"3566f49d-2c94-40d8-b5b1-aa51cc7c043b","Type":"ContainerStarted","Data":"04701edc36924db115af6d638f7b61bb545583919c7675f88c3f4c9e66715bec"} Sep 29 19:18:10 crc kubenswrapper[4779]: I0929 19:18:10.029810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" event={"ID":"d0e20784-de41-4e9c-8c95-f047e75f30fd","Type":"ContainerStarted","Data":"3ba9ad401b3e8c87f81eb71b7ac7d2013679c4a92ac54accf954cc60d0db044d"} Sep 29 19:18:10 crc kubenswrapper[4779]: I0929 19:18:10.031243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g5jq2" event={"ID":"599cf28e-c7ed-4c1a-a84d-ae90ec0708ba","Type":"ContainerStarted","Data":"8737e0b3dc81c3ac348d32425a058fa68e47f643aba94ad796c2aed7f1b3313f"} Sep 29 19:18:12 crc kubenswrapper[4779]: I0929 19:18:12.047595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" event={"ID":"3566f49d-2c94-40d8-b5b1-aa51cc7c043b","Type":"ContainerStarted","Data":"9e844ff2885f1594eb81a7b1513737663670b499f3d41faf1ff73f94dba59e0b"} Sep 29 19:18:12 crc kubenswrapper[4779]: I0929 19:18:12.047970 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:12 crc kubenswrapper[4779]: I0929 19:18:12.066599 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" podStartSLOduration=2.005022619 podStartE2EDuration="4.066572207s" podCreationTimestamp="2025-09-29 19:18:08 +0000 UTC" firstStartedPulling="2025-09-29 19:18:09.384480459 +0000 UTC m=+600.268905559" 
lastFinishedPulling="2025-09-29 19:18:11.446030037 +0000 UTC m=+602.330455147" observedRunningTime="2025-09-29 19:18:12.060655975 +0000 UTC m=+602.945081085" watchObservedRunningTime="2025-09-29 19:18:12.066572207 +0000 UTC m=+602.950997307" Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.054088 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" event={"ID":"d0e20784-de41-4e9c-8c95-f047e75f30fd","Type":"ContainerStarted","Data":"91c0398e13670946beb8b819651c13b39df5fdba99715743a369d5c896ee5fc5"} Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.056166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g5jq2" event={"ID":"599cf28e-c7ed-4c1a-a84d-ae90ec0708ba","Type":"ContainerStarted","Data":"51aa299ed400b693d24e46ff0473423a1cee3885a817cc8b6e86ded7e4d18b47"} Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.081651 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-zpt2c" podStartSLOduration=1.898965906 podStartE2EDuration="5.081629433s" podCreationTimestamp="2025-09-29 19:18:08 +0000 UTC" firstStartedPulling="2025-09-29 19:18:09.586221589 +0000 UTC m=+600.470646689" lastFinishedPulling="2025-09-29 19:18:12.768885086 +0000 UTC m=+603.653310216" observedRunningTime="2025-09-29 19:18:13.079466344 +0000 UTC m=+603.963891474" watchObservedRunningTime="2025-09-29 19:18:13.081629433 +0000 UTC m=+603.966054543" Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.100567 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-g5jq2" podStartSLOduration=1.8967215739999999 podStartE2EDuration="5.100541958s" podCreationTimestamp="2025-09-29 19:18:08 +0000 UTC" firstStartedPulling="2025-09-29 19:18:09.57929354 +0000 UTC m=+600.463718680" lastFinishedPulling="2025-09-29 19:18:12.783113923 +0000 UTC m=+603.667539064" observedRunningTime="2025-09-29 19:18:13.097950488 +0000 UTC m=+603.982375588" watchObservedRunningTime="2025-09-29 19:18:13.100541958 +0000 UTC m=+603.984967058" Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.785207 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:18:13 crc kubenswrapper[4779]: I0929 19:18:13.785313 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.112701 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-bf8nn" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.610849 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-42vjg"] Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611393 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-controller" 
containerID="cri-o://6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611447 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="nbdb" containerID="cri-o://0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611546 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="northd" containerID="cri-o://6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611549 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-acl-logging" containerID="cri-o://76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611628 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611692 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="sbdb" containerID="cri-o://c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.611737 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-node" containerID="cri-o://629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.635901 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" containerID="cri-o://05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885" gracePeriod=30 Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.931146 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/3.log" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.933500 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovn-acl-logging/0.log" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.933872 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovn-controller/0.log" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.937035 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.996665 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7fr7"] Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.996935 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-node" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.996955 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-node" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.996969 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="nbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.996977 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="nbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.996991 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997001 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997016 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="northd" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997026 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="northd" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997040 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-acl-logging" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997050 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-acl-logging" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997062 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997071 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997082 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997092 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997112 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kubecfg-setup" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997123 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kubecfg-setup" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997137 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997147 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997162 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997171 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997184 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="sbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997194 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="sbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997350 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-acl-logging" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997369 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997381 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997391 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997401 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="nbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997414 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997425 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997442 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="sbdb" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997457 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovn-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997468 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="northd" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997480 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="kube-rbac-proxy-node" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997635 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 
19:18:19.997649 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: E0929 19:18:19.997664 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997674 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:19 crc kubenswrapper[4779]: I0929 19:18:19.997824 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerName="ovnkube-controller" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.000160 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104265 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/2.log" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104861 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/1.log" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104926 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104930 4779 generic.go:334] "Generic (PLEG): container finished" podID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" containerID="5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5" exitCode=2 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104956 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.104981 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105010 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerDied","Data":"5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105030 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105054 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2kg5\" (UniqueName: \"kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105082 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105108 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105141 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105173 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105149 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket" (OuterVolumeSpecName: "log-socket") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105202 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105226 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105053 4779 scope.go:117] "RemoveContainer" containerID="0a42b5ee3be8c80c6772c05f938a0f0be5896c66157b38f8b36cc3f9e03a950d" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105261 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105193 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105470 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105511 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105538 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105575 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105598 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105624 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105644 4779 scope.go:117] "RemoveContainer" containerID="5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105676 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch" 
(OuterVolumeSpecName: "var-lib-openvswitch") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105709 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105782 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105891 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106145 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106177 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.106192 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-jfbb6_openshift-multus(3ac24bbf-c37a-4253-be71-8d8f15cfd48e)\"" pod="openshift-multus/multus-jfbb6" podUID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106202 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash" (OuterVolumeSpecName: "host-slash") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.105649 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106217 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106275 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106307 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin\") pod \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\" (UID: \"046df2ef-fb75-4d32-93e6-17b36af0a7c2\") " Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106269 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log" (OuterVolumeSpecName: "node-log") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106340 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106508 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106527 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106567 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-env-overrides\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106622 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-var-lib-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106738 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106763 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-config\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106867 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-ovn\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106900 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-systemd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107005 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c4e0517b-668f-4378-b358-67cbb7018878-ovn-node-metrics-cert\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107065 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-kubelet\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107111 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-netd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-netns\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107263 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-slash\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107304 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-bin\") pod 
\"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107384 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6bzz\" (UniqueName: \"kubernetes.io/projected/c4e0517b-668f-4378-b358-67cbb7018878-kube-api-access-m6bzz\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-systemd-units\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107527 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-script-lib\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107632 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-log-socket\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107708 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-etc-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.106820 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-node-log\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107913 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107932 4779 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107945 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107957 4779 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107969 4779 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.107980 4779 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108008 4779 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108021 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108033 4779 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108047 4779 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108060 4779 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108073 4779 reconciler_common.go:293] "Volume 
detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108085 4779 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108096 4779 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108108 4779 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108119 4779 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.108130 4779 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.111206 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovnkube-controller/3.log" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.112092 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5" (OuterVolumeSpecName: "kube-api-access-x2kg5") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "kube-api-access-x2kg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.112613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.116366 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovn-acl-logging/0.log" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.117144 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-42vjg_046df2ef-fb75-4d32-93e6-17b36af0a7c2/ovn-controller/0.log" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118118 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118161 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118170 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118194 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118239 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118260 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118176 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118469 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118506 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118522 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" exitCode=0 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118539 4779 generic.go:334] "Generic (PLEG): container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" exitCode=143 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118556 4779 generic.go:334] "Generic (PLEG): 
container finished" podID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611" exitCode=143 Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118469 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118619 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118649 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118674 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118693 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118706 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118721 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118732 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118744 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118755 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118765 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118777 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118788 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118803 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118820 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118832 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118843 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118854 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118867 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118878 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118889 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118900 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118911 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118922 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118936 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118952 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118965 4779 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118976 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.118990 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119000 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119011 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119021 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119032 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119042 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119052 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119066 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-42vjg" event={"ID":"046df2ef-fb75-4d32-93e6-17b36af0a7c2","Type":"ContainerDied","Data":"66a10a1da39ecac420aef1a9520c7fa31a9b94be01cf95454ef84d875a969b8d"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119087 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119099 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119111 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119123 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119133 4779 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119145 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119156 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119166 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119177 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.119187 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.127100 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "046df2ef-fb75-4d32-93e6-17b36af0a7c2" (UID: "046df2ef-fb75-4d32-93e6-17b36af0a7c2"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.149775 4779 scope.go:117] "RemoveContainer" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.170455 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.193408 4779 scope.go:117] "RemoveContainer" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210069 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210155 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-var-lib-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210220 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-config\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210288 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-ovn\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210309 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-var-lib-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210337 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210376 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-systemd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-ovn\") pod 
\"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210436 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c4e0517b-668f-4378-b358-67cbb7018878-ovn-node-metrics-cert\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210480 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-kubelet\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210522 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-netd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210573 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-systemd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210616 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-kubelet\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210577 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-netns\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210673 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-netd\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210697 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-slash\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210719 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-slash\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc 
kubenswrapper[4779]: I0929 19:18:20.210737 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-run-netns\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210785 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-bin\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210824 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6bzz\" (UniqueName: \"kubernetes.io/projected/c4e0517b-668f-4378-b358-67cbb7018878-kube-api-access-m6bzz\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210849 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-systemd-units\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210890 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-cni-bin\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210934 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-script-lib\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210975 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.210996 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-log-socket\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211032 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-etc-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211065 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-node-log\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211113 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211157 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-env-overrides\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211225 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-log-socket\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211258 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/046df2ef-fb75-4d32-93e6-17b36af0a7c2-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211281 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2kg5\" (UniqueName: \"kubernetes.io/projected/046df2ef-fb75-4d32-93e6-17b36af0a7c2-kube-api-access-x2kg5\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211294 4779 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/046df2ef-fb75-4d32-93e6-17b36af0a7c2-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211351 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-systemd-units\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211627 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-etc-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211644 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-node-log\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211813 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-run-openvswitch\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.211858 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c4e0517b-668f-4378-b358-67cbb7018878-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.212358 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-env-overrides\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.212384 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-script-lib\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.213693 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c4e0517b-668f-4378-b358-67cbb7018878-ovnkube-config\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.218051 4779 scope.go:117] "RemoveContainer" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.218569 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c4e0517b-668f-4378-b358-67cbb7018878-ovn-node-metrics-cert\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.240269 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6bzz\" (UniqueName: \"kubernetes.io/projected/c4e0517b-668f-4378-b358-67cbb7018878-kube-api-access-m6bzz\") pod \"ovnkube-node-g7fr7\" (UID: \"c4e0517b-668f-4378-b358-67cbb7018878\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.243025 4779 scope.go:117] "RemoveContainer" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.265418 4779 scope.go:117] "RemoveContainer" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.287740 4779 scope.go:117] "RemoveContainer" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.311402 4779 scope.go:117] "RemoveContainer" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.312436 4779 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.339968 4779 scope.go:117] "RemoveContainer" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.366160 4779 scope.go:117] "RemoveContainer" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.395794 4779 scope.go:117] "RemoveContainer" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.396733 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": container with ID starting with 05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885 not found: ID does not exist" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.396943 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} err="failed to get container status \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": rpc error: code = NotFound desc = could not find container \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": container with ID starting with 05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.397091 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.397732 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": container with ID starting with 2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905 not found: ID does not exist" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.397982 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} err="failed to get container status \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": rpc error: code = NotFound desc = could not find container \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": container with ID starting with 2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.398128 4779 scope.go:117] "RemoveContainer" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.398851 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": container with ID starting with c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb not found: ID does not exist" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.398931 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} err="failed to get container status \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": rpc error: code = NotFound desc = could not find container \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": container with ID starting with c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.398991 4779 scope.go:117] "RemoveContainer" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.399893 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": container with ID starting with 0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e not found: ID does not exist" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.399939 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} err="failed to get container status \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": rpc error: code = NotFound desc = could not find container \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": container with ID starting with 0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.399968 4779 scope.go:117] "RemoveContainer" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.400344 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": container with ID starting with 6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe not found: ID does not exist" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.400377 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} err="failed to get container status \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": rpc error: code = NotFound desc = could not find container \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": container with ID starting with 6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.400403 4779 scope.go:117] "RemoveContainer" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.400721 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": container with ID starting with 1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461 not found: ID does not exist" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.400753 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} err="failed to get container status \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": rpc error: code = NotFound desc = could not find container \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": container with ID starting with 1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.400775 4779 scope.go:117] "RemoveContainer" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.401075 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": container with ID starting with 629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7 not found: ID does not exist" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401102 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} err="failed to get container status \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": rpc error: code = NotFound desc = could not find container \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": container with ID starting with 629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401125 4779 scope.go:117] "RemoveContainer" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.401485 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": container with ID starting with 76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11 not found: ID does not exist" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401516 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} err="failed to get container status \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": rpc error: code = NotFound desc = could not find container \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": container with ID starting with 76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401538 4779 scope.go:117] "RemoveContainer" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.401866 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": container with ID starting with 6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611 not found: ID does not exist" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401896 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} err="failed to get container status \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": rpc error: code = NotFound desc = could not find container \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": container with ID starting with 6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.401919 4779 scope.go:117] "RemoveContainer" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"
Sep 29 19:18:20 crc kubenswrapper[4779]: E0929 19:18:20.402184 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": container with ID starting with e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c not found: ID does not exist" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402211 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} err="failed to get container status \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": rpc error: code = NotFound desc = could not find container \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": container with ID starting with e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402233 4779 scope.go:117] "RemoveContainer" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402581 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} err="failed to get container status \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": rpc error: code = NotFound desc = could not find container \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": container with ID starting with 05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885 not found: ID does not exist"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402607 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"
Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402879 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} err="failed to get container status
\"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": rpc error: code = NotFound desc = could not find container \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": container with ID starting with 2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.402904 4779 scope.go:117] "RemoveContainer" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.403156 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} err="failed to get container status \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": rpc error: code = NotFound desc = could not find container \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": container with ID starting with c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.403179 4779 scope.go:117] "RemoveContainer" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.403459 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} err="failed to get container status \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": rpc error: code = NotFound desc = could not find container \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": container with ID starting with 0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.403483 4779 scope.go:117] "RemoveContainer" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.411147 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} err="failed to get container status \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": rpc error: code = NotFound desc = could not find container \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": container with ID starting with 6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.411451 4779 scope.go:117] "RemoveContainer" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.412136 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} err="failed to get container status \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": rpc error: code = NotFound desc = could not find container \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": container with ID starting with 1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.412191 4779 scope.go:117] "RemoveContainer" 
containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.412693 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} err="failed to get container status \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": rpc error: code = NotFound desc = could not find container \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": container with ID starting with 629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.412960 4779 scope.go:117] "RemoveContainer" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.413611 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} err="failed to get container status \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": rpc error: code = NotFound desc = could not find container \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": container with ID starting with 76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.413641 4779 scope.go:117] "RemoveContainer" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.414121 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} err="failed to get container status \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": rpc error: code = NotFound desc = could not find container \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": container with ID starting with 6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.414313 4779 scope.go:117] "RemoveContainer" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.414825 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} err="failed to get container status \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": rpc error: code = NotFound desc = could not find container \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": container with ID starting with e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.414852 4779 scope.go:117] "RemoveContainer" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.415133 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} err="failed to get container status \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": rpc error: code = NotFound desc = could not find 
container \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": container with ID starting with 05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.415270 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.416113 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} err="failed to get container status \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": rpc error: code = NotFound desc = could not find container \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": container with ID starting with 2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.416195 4779 scope.go:117] "RemoveContainer" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.416761 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} err="failed to get container status \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": rpc error: code = NotFound desc = could not find container \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": container with ID starting with c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.416811 4779 scope.go:117] "RemoveContainer" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.417359 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} err="failed to get container status \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": rpc error: code = NotFound desc = could not find container \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": container with ID starting with 0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.417411 4779 scope.go:117] "RemoveContainer" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.419386 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} err="failed to get container status \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": rpc error: code = NotFound desc = could not find container \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": container with ID starting with 6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.419422 4779 scope.go:117] "RemoveContainer" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.419867 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} err="failed to get container status \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": rpc error: code = NotFound desc = could not find container \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": container with ID starting with 1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.419914 4779 scope.go:117] "RemoveContainer" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.420410 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} err="failed to get container status \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": rpc error: code = NotFound desc = could not find container \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": container with ID starting with 629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.420469 4779 scope.go:117] "RemoveContainer" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.420924 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} err="failed to get container status \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": rpc error: code = NotFound desc = could not find container \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": container with ID starting with 76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.421076 4779 scope.go:117] "RemoveContainer" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.421598 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} err="failed to get container status \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": rpc error: code = NotFound desc = could not find container \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": container with ID starting with 6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.421625 4779 scope.go:117] "RemoveContainer" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.421962 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} err="failed to get container status \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": rpc error: code = NotFound desc = could not find container \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": container with ID starting with 
e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.421989 4779 scope.go:117] "RemoveContainer" containerID="05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.422366 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885"} err="failed to get container status \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": rpc error: code = NotFound desc = could not find container \"05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885\": container with ID starting with 05fd1e4418540394817185026240ded324511397fc0990e09794511ca0417885 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.422394 4779 scope.go:117] "RemoveContainer" containerID="2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.422752 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905"} err="failed to get container status \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": rpc error: code = NotFound desc = could not find container \"2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905\": container with ID starting with 2e04b0f18f7180f57e083d45291ba413fcda4de8f92aa53189b296e6bc5c3905 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.422779 4779 scope.go:117] "RemoveContainer" containerID="c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423049 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb"} err="failed to get container status \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": rpc error: code = NotFound desc = could not find container \"c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb\": container with ID starting with c8993f5be0b77b752f60e7346f609aaeef20db2f0e72cbebde06a11fa7d2f5bb not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423076 4779 scope.go:117] "RemoveContainer" containerID="0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423354 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e"} err="failed to get container status \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": rpc error: code = NotFound desc = could not find container \"0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e\": container with ID starting with 0d18ddf7528c789be80d32cd6921ada5ed114d01130b7be97e73ec6d9168267e not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423380 4779 scope.go:117] "RemoveContainer" containerID="6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423651 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe"} err="failed to get container status \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": rpc error: code = NotFound desc = could not find container \"6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe\": container with ID starting with 6b6ffbcb52a6e250cd4b4bc4e8c089ef94babaaf0db836fec9f477450464eebe not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423677 4779 scope.go:117] "RemoveContainer" containerID="1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.423979 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461"} err="failed to get container status \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": rpc error: code = NotFound desc = could not find container \"1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461\": container with ID starting with 1555e96e70a9119cf2298d91512b1d8cffb303b4327d0e20bdf686e7e36e4461 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.424004 4779 scope.go:117] "RemoveContainer" containerID="629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.424302 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7"} err="failed to get container status \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": rpc error: code = NotFound desc = could not find container \"629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7\": container with ID starting with 629d703a556665e86ce3b0a5af995157440c71c8ab7d29b9259865cd624543c7 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.424358 4779 scope.go:117] "RemoveContainer" containerID="76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.424728 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11"} err="failed to get container status \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": rpc error: code = NotFound desc = could not find container \"76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11\": container with ID starting with 76304a56be996b646d706c0e7df995bb1518962396c7558304b1627e5c215c11 not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.424757 4779 scope.go:117] "RemoveContainer" containerID="6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.425102 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611"} err="failed to get container status \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": rpc error: code = NotFound desc = could not find container \"6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611\": container with ID starting with 6dcd0277739f6d5e5c90c617485f053c1c3996192f15b081c660741987ec2611 not found: ID does not exist" Sep 
29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.425135 4779 scope.go:117] "RemoveContainer" containerID="e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.425589 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c"} err="failed to get container status \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": rpc error: code = NotFound desc = could not find container \"e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c\": container with ID starting with e1e03c63d08ebe2a4b76954140684a5b43da9e92b93490aa7efa715fc2aefb4c not found: ID does not exist" Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.490676 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-42vjg"] Sep 29 19:18:20 crc kubenswrapper[4779]: I0929 19:18:20.494394 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-42vjg"] Sep 29 19:18:21 crc kubenswrapper[4779]: I0929 19:18:21.128535 4779 generic.go:334] "Generic (PLEG): container finished" podID="c4e0517b-668f-4378-b358-67cbb7018878" containerID="d853fa8b313c5d61c6f4aed81ba2089bddf6df7d5466570cd1d1a2207de2e319" exitCode=0 Sep 29 19:18:21 crc kubenswrapper[4779]: I0929 19:18:21.128639 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerDied","Data":"d853fa8b313c5d61c6f4aed81ba2089bddf6df7d5466570cd1d1a2207de2e319"} Sep 29 19:18:21 crc kubenswrapper[4779]: I0929 19:18:21.128886 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"7dd70d393856b5707aaf286fa105dddd046d01477d8076a5219a5f48c70461eb"} Sep 29 19:18:21 crc kubenswrapper[4779]: I0929 19:18:21.133807 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/2.log" Sep 29 19:18:21 crc kubenswrapper[4779]: I0929 19:18:21.774026 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="046df2ef-fb75-4d32-93e6-17b36af0a7c2" path="/var/lib/kubelet/pods/046df2ef-fb75-4d32-93e6-17b36af0a7c2/volumes" Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146468 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"3d3e7a37cb9dcaa0b0a209a756e70ddea38215ef32d3ce5657280d9d48efaf29"} Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"a352b56c987b7e5f98cd29a01f2289386498198a9556be65d4d7032054a4b734"} Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146546 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"f4223ca4824ee127fd9c4bec6ee34b3e56594dfd258f4bc7222de4959c1e5960"} Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146562 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
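
The NotFound churn at 19:18:20 above is the kubelet retrying DeleteContainer for containers whose CRI-O records are already gone. When triaging a capture like this, collapsing the storm to a per-container retry count is usually enough; a minimal sketch (Python, reading journal text on stdin; the regex is keyed to the exact field layout shown above, and the tally helper is hypothetical):

    import re
    import sys
    from collections import Counter

    # Count "DeleteContainer returned error ... NotFound" retries per container ID.
    # The pattern mirrors the pod_container_deletor.go entries in this capture.
    PATTERN = re.compile(
        r'pod_container_deletor\.go:\d+\] "DeleteContainer returned error" '
        r'containerID=\{"Type":"cri-o","ID":"(?P<id>[0-9a-f]{64})"\}'
    )

    def tally(lines):
        counts = Counter()
        for line in lines:
            match = PATTERN.search(line)
            if match:
                counts[match.group("id")] += 1
        return counts

    if __name__ == "__main__":
        for cid, n in tally(sys.stdin).most_common():
            print(f"{n:3d}x {cid[:12]}")
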
pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"1c396266c3f0ae8c184c637f15df06cf0a0f4d003469f4e8396945eff8cf9bd9"} Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"1bf63847bad16c11e1f722c420f23040343c36904770894729ad7b140a13bc49"} Sep 29 19:18:22 crc kubenswrapper[4779]: I0929 19:18:22.146590 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"f9b87dd23383cf1543935ac692907c92014ff40a0b13b44663fae9181b412e92"} Sep 29 19:18:25 crc kubenswrapper[4779]: I0929 19:18:25.173163 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"4255b9032c8acbe088eecf1b3bfa56bf9f5b98e87bf9a2100bd3568d5fe61550"} Sep 29 19:18:27 crc kubenswrapper[4779]: I0929 19:18:27.188291 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" event={"ID":"c4e0517b-668f-4378-b358-67cbb7018878","Type":"ContainerStarted","Data":"1d04ae9dfba699b9056f0743179bc6c3b39b20b3e9ef4c49dcaefd678c9f74d4"} Sep 29 19:18:27 crc kubenswrapper[4779]: I0929 19:18:27.189048 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:27 crc kubenswrapper[4779]: I0929 19:18:27.189065 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:27 crc kubenswrapper[4779]: I0929 19:18:27.221966 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" podStartSLOduration=8.221943093 podStartE2EDuration="8.221943093s" podCreationTimestamp="2025-09-29 19:18:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:18:27.220658058 +0000 UTC m=+618.105083178" watchObservedRunningTime="2025-09-29 19:18:27.221943093 +0000 UTC m=+618.106368223" Sep 29 19:18:27 crc kubenswrapper[4779]: I0929 19:18:27.242936 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:28 crc kubenswrapper[4779]: I0929 19:18:28.195687 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:28 crc kubenswrapper[4779]: I0929 19:18:28.238939 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:35 crc kubenswrapper[4779]: I0929 19:18:35.766577 4779 scope.go:117] "RemoveContainer" containerID="5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5" Sep 29 19:18:35 crc kubenswrapper[4779]: E0929 19:18:35.767650 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-jfbb6_openshift-multus(3ac24bbf-c37a-4253-be71-8d8f15cfd48e)\"" pod="openshift-multus/multus-jfbb6" 
podUID="3ac24bbf-c37a-4253-be71-8d8f15cfd48e" Sep 29 19:18:43 crc kubenswrapper[4779]: I0929 19:18:43.785008 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:18:43 crc kubenswrapper[4779]: I0929 19:18:43.785634 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:18:43 crc kubenswrapper[4779]: I0929 19:18:43.785687 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:18:43 crc kubenswrapper[4779]: I0929 19:18:43.786605 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:18:43 crc kubenswrapper[4779]: I0929 19:18:43.786694 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334" gracePeriod=600 Sep 29 19:18:44 crc kubenswrapper[4779]: I0929 19:18:44.307442 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334" exitCode=0 Sep 29 19:18:44 crc kubenswrapper[4779]: I0929 19:18:44.307559 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334"} Sep 29 19:18:44 crc kubenswrapper[4779]: I0929 19:18:44.307880 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae"} Sep 29 19:18:44 crc kubenswrapper[4779]: I0929 19:18:44.307916 4779 scope.go:117] "RemoveContainer" containerID="723d137f12ddc65960396536e98fe092ad24733cca2a22667ac4ae7a3c589f72" Sep 29 19:18:48 crc kubenswrapper[4779]: I0929 19:18:48.766682 4779 scope.go:117] "RemoveContainer" containerID="5c0164ef77822b4eb85e02d13c7c209f76c7e034e09c640eb410639572e171b5" Sep 29 19:18:49 crc kubenswrapper[4779]: I0929 19:18:49.350219 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jfbb6_3ac24bbf-c37a-4253-be71-8d8f15cfd48e/kube-multus/2.log" Sep 29 19:18:49 crc kubenswrapper[4779]: I0929 19:18:49.350892 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jfbb6" 
event={"ID":"3ac24bbf-c37a-4253-be71-8d8f15cfd48e","Type":"ContainerStarted","Data":"775448a836ab2680040a4553ac772b1e66d52094870637c95324c68adabf7976"} Sep 29 19:18:50 crc kubenswrapper[4779]: I0929 19:18:50.356246 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7fr7" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.157371 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj"] Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.159795 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.167716 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.175985 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj"] Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.262555 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w49js\" (UniqueName: \"kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.262746 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.262850 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.363933 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w49js\" (UniqueName: \"kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.364110 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " 
pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.364211 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.365278 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.365306 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.398738 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w49js\" (UniqueName: \"kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.492973 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:18:59 crc kubenswrapper[4779]: I0929 19:18:59.712216 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj"] Sep 29 19:19:00 crc kubenswrapper[4779]: I0929 19:19:00.432471 4779 generic.go:334] "Generic (PLEG): container finished" podID="96d504b6-b993-413d-aa0a-6406515e6008" containerID="8a655c0709138b8e47e79bff7c987426183ebf6fe1f3d2f3058d06ae5ba81f30" exitCode=0 Sep 29 19:19:00 crc kubenswrapper[4779]: I0929 19:19:00.432639 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" event={"ID":"96d504b6-b993-413d-aa0a-6406515e6008","Type":"ContainerDied","Data":"8a655c0709138b8e47e79bff7c987426183ebf6fe1f3d2f3058d06ae5ba81f30"} Sep 29 19:19:00 crc kubenswrapper[4779]: I0929 19:19:00.432991 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" event={"ID":"96d504b6-b993-413d-aa0a-6406515e6008","Type":"ContainerStarted","Data":"7a7d7d71e996a18da1b6eea5e1062b6ea2fb84944f17b70108d44f118d73def8"} Sep 29 19:19:04 crc kubenswrapper[4779]: I0929 19:19:04.463368 4779 generic.go:334] "Generic (PLEG): container finished" podID="96d504b6-b993-413d-aa0a-6406515e6008" containerID="d1c0211c6d4aacfa04da5fb708f12cc5d203cf2e7fc4a734bb04c7c48fa2c6bd" exitCode=0 Sep 29 19:19:04 crc kubenswrapper[4779]: I0929 19:19:04.463472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" event={"ID":"96d504b6-b993-413d-aa0a-6406515e6008","Type":"ContainerDied","Data":"d1c0211c6d4aacfa04da5fb708f12cc5d203cf2e7fc4a734bb04c7c48fa2c6bd"} Sep 29 19:19:05 crc kubenswrapper[4779]: I0929 19:19:05.475941 4779 generic.go:334] "Generic (PLEG): container finished" podID="96d504b6-b993-413d-aa0a-6406515e6008" containerID="42912a3faeaf57095fd4fe5259167eadd5f78fb3b83e0542267b599b188829c5" exitCode=0 Sep 29 19:19:05 crc kubenswrapper[4779]: I0929 19:19:05.475996 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" event={"ID":"96d504b6-b993-413d-aa0a-6406515e6008","Type":"ContainerDied","Data":"42912a3faeaf57095fd4fe5259167eadd5f78fb3b83e0542267b599b188829c5"} Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.793101 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.967093 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle\") pod \"96d504b6-b993-413d-aa0a-6406515e6008\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.967180 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w49js\" (UniqueName: \"kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js\") pod \"96d504b6-b993-413d-aa0a-6406515e6008\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.967230 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util\") pod \"96d504b6-b993-413d-aa0a-6406515e6008\" (UID: \"96d504b6-b993-413d-aa0a-6406515e6008\") " Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.968929 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle" (OuterVolumeSpecName: "bundle") pod "96d504b6-b993-413d-aa0a-6406515e6008" (UID: "96d504b6-b993-413d-aa0a-6406515e6008"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.973973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js" (OuterVolumeSpecName: "kube-api-access-w49js") pod "96d504b6-b993-413d-aa0a-6406515e6008" (UID: "96d504b6-b993-413d-aa0a-6406515e6008"). InnerVolumeSpecName "kube-api-access-w49js". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:06 crc kubenswrapper[4779]: I0929 19:19:06.977549 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util" (OuterVolumeSpecName: "util") pod "96d504b6-b993-413d-aa0a-6406515e6008" (UID: "96d504b6-b993-413d-aa0a-6406515e6008"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.068405 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w49js\" (UniqueName: \"kubernetes.io/projected/96d504b6-b993-413d-aa0a-6406515e6008-kube-api-access-w49js\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.068451 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.068465 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96d504b6-b993-413d-aa0a-6406515e6008-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.491521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" event={"ID":"96d504b6-b993-413d-aa0a-6406515e6008","Type":"ContainerDied","Data":"7a7d7d71e996a18da1b6eea5e1062b6ea2fb84944f17b70108d44f118d73def8"} Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.491648 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a7d7d71e996a18da1b6eea5e1062b6ea2fb84944f17b70108d44f118d73def8" Sep 29 19:19:07 crc kubenswrapper[4779]: I0929 19:19:07.491603 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.741406 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2"] Sep 29 19:19:10 crc kubenswrapper[4779]: E0929 19:19:10.742060 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="extract" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.742071 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="extract" Sep 29 19:19:10 crc kubenswrapper[4779]: E0929 19:19:10.742079 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="util" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.742085 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="util" Sep 29 19:19:10 crc kubenswrapper[4779]: E0929 19:19:10.742107 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="pull" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.742112 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="pull" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.742197 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d504b6-b993-413d-aa0a-6406515e6008" containerName="extract" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.742544 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.745127 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-29bzw" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.745127 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.745260 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.757715 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2"] Sep 29 19:19:10 crc kubenswrapper[4779]: I0929 19:19:10.918704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krs2p\" (UniqueName: \"kubernetes.io/projected/5df72cf6-fbb4-4160-afad-5fb056e747ed-kube-api-access-krs2p\") pod \"nmstate-operator-5d6f6cfd66-98lp2\" (UID: \"5df72cf6-fbb4-4160-afad-5fb056e747ed\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" Sep 29 19:19:11 crc kubenswrapper[4779]: I0929 19:19:11.020217 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krs2p\" (UniqueName: \"kubernetes.io/projected/5df72cf6-fbb4-4160-afad-5fb056e747ed-kube-api-access-krs2p\") pod \"nmstate-operator-5d6f6cfd66-98lp2\" (UID: \"5df72cf6-fbb4-4160-afad-5fb056e747ed\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" Sep 29 19:19:11 crc kubenswrapper[4779]: I0929 19:19:11.057193 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krs2p\" (UniqueName: \"kubernetes.io/projected/5df72cf6-fbb4-4160-afad-5fb056e747ed-kube-api-access-krs2p\") pod \"nmstate-operator-5d6f6cfd66-98lp2\" (UID: \"5df72cf6-fbb4-4160-afad-5fb056e747ed\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" Sep 29 19:19:11 crc kubenswrapper[4779]: I0929 19:19:11.357098 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" Sep 29 19:19:11 crc kubenswrapper[4779]: I0929 19:19:11.650607 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2"] Sep 29 19:19:11 crc kubenswrapper[4779]: W0929 19:19:11.665214 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5df72cf6_fbb4_4160_afad_5fb056e747ed.slice/crio-6e1aaf98fbc1d380b3fd6892c722b8fab9e9d85e35519d9983376160747079cb WatchSource:0}: Error finding container 6e1aaf98fbc1d380b3fd6892c722b8fab9e9d85e35519d9983376160747079cb: Status 404 returned error can't find the container with id 6e1aaf98fbc1d380b3fd6892c722b8fab9e9d85e35519d9983376160747079cb Sep 29 19:19:12 crc kubenswrapper[4779]: I0929 19:19:12.523923 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" event={"ID":"5df72cf6-fbb4-4160-afad-5fb056e747ed","Type":"ContainerStarted","Data":"6e1aaf98fbc1d380b3fd6892c722b8fab9e9d85e35519d9983376160747079cb"} Sep 29 19:19:14 crc kubenswrapper[4779]: I0929 19:19:14.536592 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" event={"ID":"5df72cf6-fbb4-4160-afad-5fb056e747ed","Type":"ContainerStarted","Data":"540ac5f74236c947b05a6a0943ec4a74e5c7a46fe07e2b7cf82eb32a0df71083"} Sep 29 19:19:14 crc kubenswrapper[4779]: I0929 19:19:14.562117 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-98lp2" podStartSLOduration=2.551233645 podStartE2EDuration="4.562094138s" podCreationTimestamp="2025-09-29 19:19:10 +0000 UTC" firstStartedPulling="2025-09-29 19:19:11.670284057 +0000 UTC m=+662.554709157" lastFinishedPulling="2025-09-29 19:19:13.68114455 +0000 UTC m=+664.565569650" observedRunningTime="2025-09-29 19:19:14.556895307 +0000 UTC m=+665.441320447" watchObservedRunningTime="2025-09-29 19:19:14.562094138 +0000 UTC m=+665.446519268" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.618456 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.619944 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.623337 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-fzszx" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.635427 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.636216 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.638283 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.639213 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.655518 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.683529 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-v9vn6"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.684412 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.739364 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ef026a49-4282-45ef-b535-288ac25fe011-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.739422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wclpc\" (UniqueName: \"kubernetes.io/projected/ef026a49-4282-45ef-b535-288ac25fe011-kube-api-access-wclpc\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.739480 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpjpq\" (UniqueName: \"kubernetes.io/projected/c0c8d102-b0f5-4ebd-ad41-3359fd330e5c-kube-api-access-cpjpq\") pod \"nmstate-metrics-58fcddf996-zwk4x\" (UID: \"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.773618 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.774344 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.775690 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.775918 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.775973 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-cvmw4" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.801492 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.840883 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-nmstate-lock\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.840944 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ef026a49-4282-45ef-b535-288ac25fe011-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.840989 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wclpc\" (UniqueName: \"kubernetes.io/projected/ef026a49-4282-45ef-b535-288ac25fe011-kube-api-access-wclpc\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.841017 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swr8p\" (UniqueName: \"kubernetes.io/projected/c74d7452-c58a-4336-9acf-acc9190816a9-kube-api-access-swr8p\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.841046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-dbus-socket\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.841080 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpjpq\" (UniqueName: \"kubernetes.io/projected/c0c8d102-b0f5-4ebd-ad41-3359fd330e5c-kube-api-access-cpjpq\") pod \"nmstate-metrics-58fcddf996-zwk4x\" (UID: \"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.841127 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-ovs-socket\") pod \"nmstate-handler-v9vn6\" (UID: 
\"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.855173 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ef026a49-4282-45ef-b535-288ac25fe011-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.858434 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wclpc\" (UniqueName: \"kubernetes.io/projected/ef026a49-4282-45ef-b535-288ac25fe011-kube-api-access-wclpc\") pod \"nmstate-webhook-6d689559c5-qhlnm\" (UID: \"ef026a49-4282-45ef-b535-288ac25fe011\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.861040 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpjpq\" (UniqueName: \"kubernetes.io/projected/c0c8d102-b0f5-4ebd-ad41-3359fd330e5c-kube-api-access-cpjpq\") pod \"nmstate-metrics-58fcddf996-zwk4x\" (UID: \"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942682 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4lln\" (UniqueName: \"kubernetes.io/projected/6c719c4d-6f34-4427-8c72-69a5c0efe754-kube-api-access-p4lln\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942765 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-nmstate-lock\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942813 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c719c4d-6f34-4427-8c72-69a5c0efe754-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942856 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c719c4d-6f34-4427-8c72-69a5c0efe754-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942879 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swr8p\" (UniqueName: \"kubernetes.io/projected/c74d7452-c58a-4336-9acf-acc9190816a9-kube-api-access-swr8p\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.942953 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-dbus-socket\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.943051 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-ovs-socket\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.943141 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-ovs-socket\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.943200 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-nmstate-lock\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.944709 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c74d7452-c58a-4336-9acf-acc9190816a9-dbus-socket\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.946695 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.950070 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-57f48b5f7f-9lvsh"] Sep 29 19:19:19 crc kubenswrapper[4779]: I0929 19:19:19.950795 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.023333 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57f48b5f7f-9lvsh"] Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.023929 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swr8p\" (UniqueName: \"kubernetes.io/projected/c74d7452-c58a-4336-9acf-acc9190816a9-kube-api-access-swr8p\") pod \"nmstate-handler-v9vn6\" (UID: \"c74d7452-c58a-4336-9acf-acc9190816a9\") " pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.027230 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.044333 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4lln\" (UniqueName: \"kubernetes.io/projected/6c719c4d-6f34-4427-8c72-69a5c0efe754-kube-api-access-p4lln\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.044835 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c719c4d-6f34-4427-8c72-69a5c0efe754-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.045007 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c719c4d-6f34-4427-8c72-69a5c0efe754-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.046004 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6c719c4d-6f34-4427-8c72-69a5c0efe754-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.047998 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c719c4d-6f34-4427-8c72-69a5c0efe754-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.063580 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4lln\" (UniqueName: \"kubernetes.io/projected/6c719c4d-6f34-4427-8c72-69a5c0efe754-kube-api-access-p4lln\") pod \"nmstate-console-plugin-864bb6dfb5-nvgwc\" (UID: \"6c719c4d-6f34-4427-8c72-69a5c0efe754\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.088216 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147163 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147237 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgbgf\" (UniqueName: \"kubernetes.io/projected/9f9243a2-2e86-40cd-b16f-05ebab05025c-kube-api-access-hgbgf\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147261 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-oauth-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147284 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-oauth-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147302 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147471 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-service-ca\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.147637 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-trusted-ca-bundle\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249252 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-service-ca\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249341 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-trusted-ca-bundle\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249437 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgbgf\" (UniqueName: \"kubernetes.io/projected/9f9243a2-2e86-40cd-b16f-05ebab05025c-kube-api-access-hgbgf\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249462 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-oauth-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-oauth-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.249512 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.251976 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.262005 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-oauth-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.265376 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-service-ca\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.266033 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/9f9243a2-2e86-40cd-b16f-05ebab05025c-trusted-ca-bundle\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.267797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgbgf\" (UniqueName: \"kubernetes.io/projected/9f9243a2-2e86-40cd-b16f-05ebab05025c-kube-api-access-hgbgf\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.271929 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-oauth-config\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.272621 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f9243a2-2e86-40cd-b16f-05ebab05025c-console-serving-cert\") pod \"console-57f48b5f7f-9lvsh\" (UID: \"9f9243a2-2e86-40cd-b16f-05ebab05025c\") " pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.291513 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc"] Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.304758 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.326669 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:20 crc kubenswrapper[4779]: W0929 19:19:20.329757 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc74d7452_c58a_4336_9acf_acc9190816a9.slice/crio-c0b276af9185e69abae753416814fe11985a2fd1d01e40ebc05a6d19316a2d12 WatchSource:0}: Error finding container c0b276af9185e69abae753416814fe11985a2fd1d01e40ebc05a6d19316a2d12: Status 404 returned error can't find the container with id c0b276af9185e69abae753416814fe11985a2fd1d01e40ebc05a6d19316a2d12 Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.425561 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x"] Sep 29 19:19:20 crc kubenswrapper[4779]: W0929 19:19:20.437004 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0c8d102_b0f5_4ebd_ad41_3359fd330e5c.slice/crio-803e88bd9286a5ab4282411d80fa2027ed5022dcd71b0f04468b4ec8829b9070 WatchSource:0}: Error finding container 803e88bd9286a5ab4282411d80fa2027ed5022dcd71b0f04468b4ec8829b9070: Status 404 returned error can't find the container with id 803e88bd9286a5ab4282411d80fa2027ed5022dcd71b0f04468b4ec8829b9070 Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.449644 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm"] Sep 29 19:19:20 crc kubenswrapper[4779]: W0929 19:19:20.458923 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef026a49_4282_45ef_b535_288ac25fe011.slice/crio-2d28cbd4c26d37a9f9566958743c2f39d54c459272bda015d28a1cf976a7e55a WatchSource:0}: Error finding container 2d28cbd4c26d37a9f9566958743c2f39d54c459272bda015d28a1cf976a7e55a: Status 404 returned error can't find the container with id 2d28cbd4c26d37a9f9566958743c2f39d54c459272bda015d28a1cf976a7e55a Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.526534 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57f48b5f7f-9lvsh"] Sep 29 19:19:20 crc kubenswrapper[4779]: W0929 19:19:20.531034 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f9243a2_2e86_40cd_b16f_05ebab05025c.slice/crio-e39ce409d04463440d05844dc02ccde19726187f5328ee17ca461fd4a1999a74 WatchSource:0}: Error finding container e39ce409d04463440d05844dc02ccde19726187f5328ee17ca461fd4a1999a74: Status 404 returned error can't find the container with id e39ce409d04463440d05844dc02ccde19726187f5328ee17ca461fd4a1999a74 Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.571912 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v9vn6" event={"ID":"c74d7452-c58a-4336-9acf-acc9190816a9","Type":"ContainerStarted","Data":"c0b276af9185e69abae753416814fe11985a2fd1d01e40ebc05a6d19316a2d12"} Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.575109 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57f48b5f7f-9lvsh" event={"ID":"9f9243a2-2e86-40cd-b16f-05ebab05025c","Type":"ContainerStarted","Data":"e39ce409d04463440d05844dc02ccde19726187f5328ee17ca461fd4a1999a74"} Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.576290 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" event={"ID":"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c","Type":"ContainerStarted","Data":"803e88bd9286a5ab4282411d80fa2027ed5022dcd71b0f04468b4ec8829b9070"} Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.577100 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" event={"ID":"6c719c4d-6f34-4427-8c72-69a5c0efe754","Type":"ContainerStarted","Data":"28e9360b251fab0511215085525e56f099234cfb9e5a555f2584f737395f7ff7"} Sep 29 19:19:20 crc kubenswrapper[4779]: I0929 19:19:20.577904 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" event={"ID":"ef026a49-4282-45ef-b535-288ac25fe011","Type":"ContainerStarted","Data":"2d28cbd4c26d37a9f9566958743c2f39d54c459272bda015d28a1cf976a7e55a"} Sep 29 19:19:21 crc kubenswrapper[4779]: I0929 19:19:21.589024 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57f48b5f7f-9lvsh" event={"ID":"9f9243a2-2e86-40cd-b16f-05ebab05025c","Type":"ContainerStarted","Data":"196df8fe0fb6a9365e69e8c5a3bfc2536a3dfd76ca0c388bc0e8c0c2f94448bf"} Sep 29 19:19:21 crc kubenswrapper[4779]: I0929 19:19:21.608935 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-57f48b5f7f-9lvsh" podStartSLOduration=2.6089178200000003 podStartE2EDuration="2.60891782s" podCreationTimestamp="2025-09-29 19:19:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:19:21.605433165 +0000 UTC m=+672.489858345" watchObservedRunningTime="2025-09-29 19:19:21.60891782 +0000 UTC m=+672.493342920" Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.606187 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v9vn6" event={"ID":"c74d7452-c58a-4336-9acf-acc9190816a9","Type":"ContainerStarted","Data":"283b02ba1ecf29482f0d8f01304044c65b792c1dbbb8ef2c53f09422c049e147"} Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.606635 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.611208 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" event={"ID":"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c","Type":"ContainerStarted","Data":"78a1185075f93a50b4c0179a2c21d878353d29b7244f73296054ef0c7b9b771c"} Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.613559 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" event={"ID":"6c719c4d-6f34-4427-8c72-69a5c0efe754","Type":"ContainerStarted","Data":"47507aabe6b70a79b9f10568715529c6bcc4452241d356b13acebf5d54fe3f91"} Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.615819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" event={"ID":"ef026a49-4282-45ef-b535-288ac25fe011","Type":"ContainerStarted","Data":"33100eaf9d1d81924dacc37066a5198c9f36e61c03e0ba8bd02c760e6391d0f1"} Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.616016 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.629679 4779 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-nmstate/nmstate-handler-v9vn6" podStartSLOduration=2.045877714 podStartE2EDuration="4.629662481s" podCreationTimestamp="2025-09-29 19:19:19 +0000 UTC" firstStartedPulling="2025-09-29 19:19:20.357136211 +0000 UTC m=+671.241561311" lastFinishedPulling="2025-09-29 19:19:22.940920918 +0000 UTC m=+673.825346078" observedRunningTime="2025-09-29 19:19:23.62704649 +0000 UTC m=+674.511471670" watchObservedRunningTime="2025-09-29 19:19:23.629662481 +0000 UTC m=+674.514087591" Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.670074 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-nvgwc" podStartSLOduration=2.061256202 podStartE2EDuration="4.670040279s" podCreationTimestamp="2025-09-29 19:19:19 +0000 UTC" firstStartedPulling="2025-09-29 19:19:20.300162122 +0000 UTC m=+671.184587222" lastFinishedPulling="2025-09-29 19:19:22.908946169 +0000 UTC m=+673.793371299" observedRunningTime="2025-09-29 19:19:23.647471565 +0000 UTC m=+674.531896695" watchObservedRunningTime="2025-09-29 19:19:23.670040279 +0000 UTC m=+674.554465459" Sep 29 19:19:23 crc kubenswrapper[4779]: I0929 19:19:23.675579 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" podStartSLOduration=2.2217519550000002 podStartE2EDuration="4.675556509s" podCreationTimestamp="2025-09-29 19:19:19 +0000 UTC" firstStartedPulling="2025-09-29 19:19:20.461037195 +0000 UTC m=+671.345462295" lastFinishedPulling="2025-09-29 19:19:22.914841759 +0000 UTC m=+673.799266849" observedRunningTime="2025-09-29 19:19:23.671600941 +0000 UTC m=+674.556026091" watchObservedRunningTime="2025-09-29 19:19:23.675556509 +0000 UTC m=+674.559981639" Sep 29 19:19:25 crc kubenswrapper[4779]: I0929 19:19:25.631101 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" event={"ID":"c0c8d102-b0f5-4ebd-ad41-3359fd330e5c","Type":"ContainerStarted","Data":"ee9dd91b203349440d0e57cb48f53d086b90b86b992125e6b6266eebeb2646e5"} Sep 29 19:19:25 crc kubenswrapper[4779]: I0929 19:19:25.660070 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-zwk4x" podStartSLOduration=1.9464914420000001 podStartE2EDuration="6.660044246s" podCreationTimestamp="2025-09-29 19:19:19 +0000 UTC" firstStartedPulling="2025-09-29 19:19:20.438968055 +0000 UTC m=+671.323393155" lastFinishedPulling="2025-09-29 19:19:25.152520849 +0000 UTC m=+676.036945959" observedRunningTime="2025-09-29 19:19:25.651280988 +0000 UTC m=+676.535706128" watchObservedRunningTime="2025-09-29 19:19:25.660044246 +0000 UTC m=+676.544469386" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.328025 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.328948 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.337155 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.351490 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-v9vn6" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.673194 
4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-57f48b5f7f-9lvsh" Sep 29 19:19:30 crc kubenswrapper[4779]: I0929 19:19:30.746500 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hhcst"] Sep 29 19:19:40 crc kubenswrapper[4779]: I0929 19:19:40.032041 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-qhlnm" Sep 29 19:19:55 crc kubenswrapper[4779]: I0929 19:19:55.799905 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-hhcst" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" containerID="cri-o://1d0d0b98a8f7584b56aba7dd5a3a46d92044e49c9cbd883378512f02b6fe215d" gracePeriod=15 Sep 29 19:19:56 crc kubenswrapper[4779]: I0929 19:19:56.855693 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hhcst_52419e1c-e6c0-4225-95bf-da711c24e399/console/0.log" Sep 29 19:19:56 crc kubenswrapper[4779]: I0929 19:19:56.856016 4779 generic.go:334] "Generic (PLEG): container finished" podID="52419e1c-e6c0-4225-95bf-da711c24e399" containerID="1d0d0b98a8f7584b56aba7dd5a3a46d92044e49c9cbd883378512f02b6fe215d" exitCode=2 Sep 29 19:19:56 crc kubenswrapper[4779]: I0929 19:19:56.856066 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hhcst" event={"ID":"52419e1c-e6c0-4225-95bf-da711c24e399","Type":"ContainerDied","Data":"1d0d0b98a8f7584b56aba7dd5a3a46d92044e49c9cbd883378512f02b6fe215d"} Sep 29 19:19:56 crc kubenswrapper[4779]: I0929 19:19:56.941009 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hhcst_52419e1c-e6c0-4225-95bf-da711c24e399/console/0.log" Sep 29 19:19:56 crc kubenswrapper[4779]: I0929 19:19:56.941408 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.074776 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.074912 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqr2m\" (UniqueName: \"kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.074954 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075024 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075079 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075141 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075195 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle\") pod \"52419e1c-e6c0-4225-95bf-da711c24e399\" (UID: \"52419e1c-e6c0-4225-95bf-da711c24e399\") " Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075930 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config" (OuterVolumeSpecName: "console-config") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.075999 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.076176 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca" (OuterVolumeSpecName: "service-ca") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.076412 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.084840 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.086971 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m" (OuterVolumeSpecName: "kube-api-access-kqr2m") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "kube-api-access-kqr2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.092057 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "52419e1c-e6c0-4225-95bf-da711c24e399" (UID: "52419e1c-e6c0-4225-95bf-da711c24e399"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.171310 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp"] Sep 29 19:19:57 crc kubenswrapper[4779]: E0929 19:19:57.171702 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.171725 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.171924 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" containerName="console" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.173247 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.176891 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177112 4779 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177150 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177174 4779 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/52419e1c-e6c0-4225-95bf-da711c24e399-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177192 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177210 4779 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177227 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqr2m\" (UniqueName: \"kubernetes.io/projected/52419e1c-e6c0-4225-95bf-da711c24e399-kube-api-access-kqr2m\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.177245 4779 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/52419e1c-e6c0-4225-95bf-da711c24e399-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.181063 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp"] Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.278199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.278271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.278366 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjvd2\" 
(UniqueName: \"kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.380071 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjvd2\" (UniqueName: \"kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.380214 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.380279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.380927 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.381177 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.409081 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjvd2\" (UniqueName: \"kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.501604 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.866665 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hhcst_52419e1c-e6c0-4225-95bf-da711c24e399/console/0.log" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.867072 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hhcst" event={"ID":"52419e1c-e6c0-4225-95bf-da711c24e399","Type":"ContainerDied","Data":"bfa5ff66d48704f02e9adaab4ff4ada4788ea0ca5db9085684e0c70f091481cd"} Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.867138 4779 scope.go:117] "RemoveContainer" containerID="1d0d0b98a8f7584b56aba7dd5a3a46d92044e49c9cbd883378512f02b6fe215d" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.867175 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hhcst" Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.897370 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hhcst"] Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.903676 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-hhcst"] Sep 29 19:19:57 crc kubenswrapper[4779]: I0929 19:19:57.985461 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp"] Sep 29 19:19:58 crc kubenswrapper[4779]: I0929 19:19:58.875225 4779 generic.go:334] "Generic (PLEG): container finished" podID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerID="d7563c935dbbfa54fd3164ed5a5d2bdbd15033b732a4157c5a58bd3410ae69ef" exitCode=0 Sep 29 19:19:58 crc kubenswrapper[4779]: I0929 19:19:58.875306 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" event={"ID":"a1246df7-aa2f-4e5a-9a55-99e92227fcbf","Type":"ContainerDied","Data":"d7563c935dbbfa54fd3164ed5a5d2bdbd15033b732a4157c5a58bd3410ae69ef"} Sep 29 19:19:58 crc kubenswrapper[4779]: I0929 19:19:58.875366 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" event={"ID":"a1246df7-aa2f-4e5a-9a55-99e92227fcbf","Type":"ContainerStarted","Data":"0be8eae5658c8914da4fa62d23e35d96560de3d2610f1c3dd70effed6a916ed6"} Sep 29 19:19:59 crc kubenswrapper[4779]: I0929 19:19:59.781463 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52419e1c-e6c0-4225-95bf-da711c24e399" path="/var/lib/kubelet/pods/52419e1c-e6c0-4225-95bf-da711c24e399/volumes" Sep 29 19:20:00 crc kubenswrapper[4779]: I0929 19:20:00.894014 4779 generic.go:334] "Generic (PLEG): container finished" podID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerID="2984b3920f0f0ba9d191cbfcd689aebeb6ca5fe1f5f0de4e363833fbf33b2c9d" exitCode=0 Sep 29 19:20:00 crc kubenswrapper[4779]: I0929 19:20:00.894135 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" event={"ID":"a1246df7-aa2f-4e5a-9a55-99e92227fcbf","Type":"ContainerDied","Data":"2984b3920f0f0ba9d191cbfcd689aebeb6ca5fe1f5f0de4e363833fbf33b2c9d"} Sep 29 19:20:01 crc kubenswrapper[4779]: I0929 19:20:01.916886 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerID="72c0565478dd92d82232c6a9ad2dbdd564a8ef74e876dd127a8d6d8f888edccf" exitCode=0 Sep 29 19:20:01 crc kubenswrapper[4779]: I0929 19:20:01.916976 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" event={"ID":"a1246df7-aa2f-4e5a-9a55-99e92227fcbf","Type":"ContainerDied","Data":"72c0565478dd92d82232c6a9ad2dbdd564a8ef74e876dd127a8d6d8f888edccf"} Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.242124 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.372717 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util\") pod \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.372832 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjvd2\" (UniqueName: \"kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2\") pod \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.372910 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle\") pod \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\" (UID: \"a1246df7-aa2f-4e5a-9a55-99e92227fcbf\") " Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.374210 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle" (OuterVolumeSpecName: "bundle") pod "a1246df7-aa2f-4e5a-9a55-99e92227fcbf" (UID: "a1246df7-aa2f-4e5a-9a55-99e92227fcbf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.378252 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2" (OuterVolumeSpecName: "kube-api-access-hjvd2") pod "a1246df7-aa2f-4e5a-9a55-99e92227fcbf" (UID: "a1246df7-aa2f-4e5a-9a55-99e92227fcbf"). InnerVolumeSpecName "kube-api-access-hjvd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.393649 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util" (OuterVolumeSpecName: "util") pod "a1246df7-aa2f-4e5a-9a55-99e92227fcbf" (UID: "a1246df7-aa2f-4e5a-9a55-99e92227fcbf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.474486 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjvd2\" (UniqueName: \"kubernetes.io/projected/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-kube-api-access-hjvd2\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.474847 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.474960 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1246df7-aa2f-4e5a-9a55-99e92227fcbf-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.938944 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" event={"ID":"a1246df7-aa2f-4e5a-9a55-99e92227fcbf","Type":"ContainerDied","Data":"0be8eae5658c8914da4fa62d23e35d96560de3d2610f1c3dd70effed6a916ed6"} Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.939004 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp" Sep 29 19:20:03 crc kubenswrapper[4779]: I0929 19:20:03.939032 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0be8eae5658c8914da4fa62d23e35d96560de3d2610f1c3dd70effed6a916ed6" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.763239 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r"] Sep 29 19:20:17 crc kubenswrapper[4779]: E0929 19:20:17.764058 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="pull" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.764074 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="pull" Sep 29 19:20:17 crc kubenswrapper[4779]: E0929 19:20:17.764085 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="extract" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.764091 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="extract" Sep 29 19:20:17 crc kubenswrapper[4779]: E0929 19:20:17.764103 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="util" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.764109 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="util" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.764235 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1246df7-aa2f-4e5a-9a55-99e92227fcbf" containerName="extract" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.764687 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.768768 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.769011 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.769162 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-rmx84" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.769279 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.769674 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.789664 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r"] Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.880976 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8798\" (UniqueName: \"kubernetes.io/projected/bc87a3b2-72fa-4bca-9172-47b799399c7b-kube-api-access-q8798\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.881035 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-webhook-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.881110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-apiservice-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.982154 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-apiservice-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.982265 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8798\" (UniqueName: \"kubernetes.io/projected/bc87a3b2-72fa-4bca-9172-47b799399c7b-kube-api-access-q8798\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.982304 
4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-webhook-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.988711 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-apiservice-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.993929 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bc87a3b2-72fa-4bca-9172-47b799399c7b-webhook-cert\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.997480 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9"] Sep 29 19:20:17 crc kubenswrapper[4779]: I0929 19:20:17.998255 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.001099 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.001217 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-4rq2c" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.001389 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.015935 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8798\" (UniqueName: \"kubernetes.io/projected/bc87a3b2-72fa-4bca-9172-47b799399c7b-kube-api-access-q8798\") pod \"metallb-operator-controller-manager-6ccfd99bc8-cd86r\" (UID: \"bc87a3b2-72fa-4bca-9172-47b799399c7b\") " pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.024847 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9"] Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.083142 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cgqk\" (UniqueName: \"kubernetes.io/projected/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-kube-api-access-8cgqk\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.083212 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-webhook-cert\") pod 
\"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.083244 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-apiservice-cert\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.085017 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.184171 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-apiservice-cert\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.184683 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cgqk\" (UniqueName: \"kubernetes.io/projected/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-kube-api-access-8cgqk\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.184766 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-webhook-cert\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.191355 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-webhook-cert\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.191421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-apiservice-cert\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.201967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cgqk\" (UniqueName: \"kubernetes.io/projected/3d88a574-ee3d-4b67-80c7-cb9ab603edfd-kube-api-access-8cgqk\") pod \"metallb-operator-webhook-server-8764cbcb6-t7df9\" (UID: \"3d88a574-ee3d-4b67-80c7-cb9ab603edfd\") " pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.287732 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r"] Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.351678 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:18 crc kubenswrapper[4779]: I0929 19:20:18.775573 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9"] Sep 29 19:20:18 crc kubenswrapper[4779]: W0929 19:20:18.783012 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d88a574_ee3d_4b67_80c7_cb9ab603edfd.slice/crio-994132f896ca40a3208562fcd91cfa24d91ce6b15eeb850f82511f71264c320b WatchSource:0}: Error finding container 994132f896ca40a3208562fcd91cfa24d91ce6b15eeb850f82511f71264c320b: Status 404 returned error can't find the container with id 994132f896ca40a3208562fcd91cfa24d91ce6b15eeb850f82511f71264c320b Sep 29 19:20:19 crc kubenswrapper[4779]: I0929 19:20:19.036865 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" event={"ID":"3d88a574-ee3d-4b67-80c7-cb9ab603edfd","Type":"ContainerStarted","Data":"994132f896ca40a3208562fcd91cfa24d91ce6b15eeb850f82511f71264c320b"} Sep 29 19:20:19 crc kubenswrapper[4779]: I0929 19:20:19.038080 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" event={"ID":"bc87a3b2-72fa-4bca-9172-47b799399c7b","Type":"ContainerStarted","Data":"e0df10bfd0ff0c61d8df778cef147f58aa1535c3c27c1c56238814a268f0ffc2"} Sep 29 19:20:22 crc kubenswrapper[4779]: I0929 19:20:22.067952 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" event={"ID":"bc87a3b2-72fa-4bca-9172-47b799399c7b","Type":"ContainerStarted","Data":"d1a96a6e01afe2247ad783cf4d2013d0b114d251e7da98bf607ff00675f020bd"} Sep 29 19:20:22 crc kubenswrapper[4779]: I0929 19:20:22.068478 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:22 crc kubenswrapper[4779]: I0929 19:20:22.095047 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" podStartSLOduration=1.941845654 podStartE2EDuration="5.095028636s" podCreationTimestamp="2025-09-29 19:20:17 +0000 UTC" firstStartedPulling="2025-09-29 19:20:18.298576342 +0000 UTC m=+729.183001452" lastFinishedPulling="2025-09-29 19:20:21.451759334 +0000 UTC m=+732.336184434" observedRunningTime="2025-09-29 19:20:22.089928247 +0000 UTC m=+732.974353347" watchObservedRunningTime="2025-09-29 19:20:22.095028636 +0000 UTC m=+732.979453746" Sep 29 19:20:24 crc kubenswrapper[4779]: I0929 19:20:24.099201 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" event={"ID":"3d88a574-ee3d-4b67-80c7-cb9ab603edfd","Type":"ContainerStarted","Data":"26dea5bf9d328009007af5ce6d06033818de28d5ccf22370d80966011a3dd3ae"} Sep 29 19:20:24 crc kubenswrapper[4779]: I0929 19:20:24.099640 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:38 crc kubenswrapper[4779]: I0929 19:20:38.356904 4779 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" Sep 29 19:20:38 crc kubenswrapper[4779]: I0929 19:20:38.385649 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-8764cbcb6-t7df9" podStartSLOduration=16.83487881 podStartE2EDuration="21.385630919s" podCreationTimestamp="2025-09-29 19:20:17 +0000 UTC" firstStartedPulling="2025-09-29 19:20:18.787941894 +0000 UTC m=+729.672367004" lastFinishedPulling="2025-09-29 19:20:23.338694003 +0000 UTC m=+734.223119113" observedRunningTime="2025-09-29 19:20:24.136390709 +0000 UTC m=+735.020815839" watchObservedRunningTime="2025-09-29 19:20:38.385630919 +0000 UTC m=+749.270056029" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.361163 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"] Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.361983 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" containerID="cri-o://5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf" gracePeriod=30 Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.450031 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"] Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.450227 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" podUID="3197e1c4-03d1-42f1-8bee-87a97962cf70" containerName="route-controller-manager" containerID="cri-o://d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c" gracePeriod=30 Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.749474 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.811468 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.925825 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert\") pod \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.925901 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert\") pod \"3197e1c4-03d1-42f1-8bee-87a97962cf70\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.925950 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config\") pod \"3197e1c4-03d1-42f1-8bee-87a97962cf70\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926003 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca\") pod \"3197e1c4-03d1-42f1-8bee-87a97962cf70\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926046 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p649g\" (UniqueName: \"kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g\") pod \"3197e1c4-03d1-42f1-8bee-87a97962cf70\" (UID: \"3197e1c4-03d1-42f1-8bee-87a97962cf70\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926096 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca\") pod \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926119 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config\") pod \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926140 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles\") pod \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926162 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmhfc\" (UniqueName: \"kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc\") pod \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\" (UID: \"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a\") " Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.926984 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config" (OuterVolumeSpecName: "config") pod "3197e1c4-03d1-42f1-8bee-87a97962cf70" (UID: 
"3197e1c4-03d1-42f1-8bee-87a97962cf70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.927007 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca" (OuterVolumeSpecName: "client-ca") pod "3197e1c4-03d1-42f1-8bee-87a97962cf70" (UID: "3197e1c4-03d1-42f1-8bee-87a97962cf70"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.927034 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca" (OuterVolumeSpecName: "client-ca") pod "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" (UID: "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.927482 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config" (OuterVolumeSpecName: "config") pod "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" (UID: "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.927502 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" (UID: "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.931382 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" (UID: "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.931672 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3197e1c4-03d1-42f1-8bee-87a97962cf70" (UID: "3197e1c4-03d1-42f1-8bee-87a97962cf70"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.931691 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc" (OuterVolumeSpecName: "kube-api-access-bmhfc") pod "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" (UID: "8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a"). InnerVolumeSpecName "kube-api-access-bmhfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:47 crc kubenswrapper[4779]: I0929 19:20:47.931735 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g" (OuterVolumeSpecName: "kube-api-access-p649g") pod "3197e1c4-03d1-42f1-8bee-87a97962cf70" (UID: "3197e1c4-03d1-42f1-8bee-87a97962cf70"). 
InnerVolumeSpecName "kube-api-access-p649g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027340 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027441 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3197e1c4-03d1-42f1-8bee-87a97962cf70-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027467 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p649g\" (UniqueName: \"kubernetes.io/projected/3197e1c4-03d1-42f1-8bee-87a97962cf70-kube-api-access-p649g\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027482 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027493 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027504 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027516 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmhfc\" (UniqueName: \"kubernetes.io/projected/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-kube-api-access-bmhfc\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027553 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.027565 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3197e1c4-03d1-42f1-8bee-87a97962cf70-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.251982 4779 generic.go:334] "Generic (PLEG): container finished" podID="3197e1c4-03d1-42f1-8bee-87a97962cf70" containerID="d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c" exitCode=0 Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.252089 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" event={"ID":"3197e1c4-03d1-42f1-8bee-87a97962cf70","Type":"ContainerDied","Data":"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c"} Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.252098 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.252126 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn" event={"ID":"3197e1c4-03d1-42f1-8bee-87a97962cf70","Type":"ContainerDied","Data":"349290492a5f8bc336f70b7acb78d292b9164a86b02f61aab5619cf841d8a75c"} Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.252155 4779 scope.go:117] "RemoveContainer" containerID="d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.254697 4779 generic.go:334] "Generic (PLEG): container finished" podID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerID="5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf" exitCode=0 Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.254785 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.254779 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" event={"ID":"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a","Type":"ContainerDied","Data":"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf"} Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.254975 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qlvq5" event={"ID":"8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a","Type":"ContainerDied","Data":"5a6472b7d5d60a9aab5e97607758595d7625f339234bcb9c343878ccbbd6bc73"} Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.278102 4779 scope.go:117] "RemoveContainer" containerID="d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c" Sep 29 19:20:48 crc kubenswrapper[4779]: E0929 19:20:48.278707 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c\": container with ID starting with d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c not found: ID does not exist" containerID="d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.278769 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c"} err="failed to get container status \"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c\": rpc error: code = NotFound desc = could not find container \"d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c\": container with ID starting with d8509e6555061139b1cde95100700ea82f9cb6f30680698f144adeffb580a46c not found: ID does not exist" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.278802 4779 scope.go:117] "RemoveContainer" containerID="5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.305288 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"] Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.308715 4779 scope.go:117] "RemoveContainer" 
containerID="5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf" Sep 29 19:20:48 crc kubenswrapper[4779]: E0929 19:20:48.309311 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf\": container with ID starting with 5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf not found: ID does not exist" containerID="5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.309393 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf"} err="failed to get container status \"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf\": rpc error: code = NotFound desc = could not find container \"5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf\": container with ID starting with 5ebe0ee1ae511cf13e2ac9b802f9f61f7e92082bb38e9022519e610757de4faf not found: ID does not exist" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.312133 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qlvq5"] Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.325033 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"] Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.330777 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-s9rfn"] Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.944789 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8"] Sep 29 19:20:48 crc kubenswrapper[4779]: E0929 19:20:48.945396 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3197e1c4-03d1-42f1-8bee-87a97962cf70" containerName="route-controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.945417 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3197e1c4-03d1-42f1-8bee-87a97962cf70" containerName="route-controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: E0929 19:20:48.945444 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.945457 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.945622 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" containerName="controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.945657 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3197e1c4-03d1-42f1-8bee-87a97962cf70" containerName="route-controller-manager" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.946247 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.947794 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.948237 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.948433 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.948640 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.948804 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.951050 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 19:20:48 crc kubenswrapper[4779]: I0929 19:20:48.958937 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8"] Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.032972 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-b46d75df9-r2w25"] Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.033686 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.036570 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.036808 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.037011 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.037274 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.037347 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.037713 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.040012 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-config\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.040082 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-bhpgx\" (UniqueName: \"kubernetes.io/projected/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-kube-api-access-bhpgx\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.040175 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-serving-cert\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.040220 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-client-ca\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.043232 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.052594 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b46d75df9-r2w25"] Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141631 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-config\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141680 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhpgx\" (UniqueName: \"kubernetes.io/projected/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-kube-api-access-bhpgx\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141724 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/749f27ae-0353-4986-9160-3c91eea3c225-serving-cert\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141741 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-serving-cert\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141765 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-client-ca\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141875 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-client-ca\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.141944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-config\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.142035 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-proxy-ca-bundles\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.142105 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cntzg\" (UniqueName: \"kubernetes.io/projected/749f27ae-0353-4986-9160-3c91eea3c225-kube-api-access-cntzg\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.142697 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-client-ca\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.142864 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-config\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.146997 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-serving-cert\") pod \"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.162997 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhpgx\" (UniqueName: \"kubernetes.io/projected/5aaea7f6-1850-4767-b8f5-9f23b460dfe9-kube-api-access-bhpgx\") pod 
\"route-controller-manager-67b9fdffdd-gjtf8\" (UID: \"5aaea7f6-1850-4767-b8f5-9f23b460dfe9\") " pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.243207 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/749f27ae-0353-4986-9160-3c91eea3c225-serving-cert\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.243269 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-client-ca\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.243335 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-config\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.243365 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-proxy-ca-bundles\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.243407 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cntzg\" (UniqueName: \"kubernetes.io/projected/749f27ae-0353-4986-9160-3c91eea3c225-kube-api-access-cntzg\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.244253 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-client-ca\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.244669 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-config\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.244820 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/749f27ae-0353-4986-9160-3c91eea3c225-proxy-ca-bundles\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.245899 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/749f27ae-0353-4986-9160-3c91eea3c225-serving-cert\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.258783 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cntzg\" (UniqueName: \"kubernetes.io/projected/749f27ae-0353-4986-9160-3c91eea3c225-kube-api-access-cntzg\") pod \"controller-manager-b46d75df9-r2w25\" (UID: \"749f27ae-0353-4986-9160-3c91eea3c225\") " pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.264751 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.353767 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.518267 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8"] Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.788369 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3197e1c4-03d1-42f1-8bee-87a97962cf70" path="/var/lib/kubelet/pods/3197e1c4-03d1-42f1-8bee-87a97962cf70/volumes" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.790837 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a" path="/var/lib/kubelet/pods/8ed9d7da-9845-4c1a-8ec8-98f610a7cc9a/volumes" Sep 29 19:20:49 crc kubenswrapper[4779]: I0929 19:20:49.842512 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b46d75df9-r2w25"] Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.267579 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" event={"ID":"5aaea7f6-1850-4767-b8f5-9f23b460dfe9","Type":"ContainerStarted","Data":"8ee1c0925cccc1edb8ec49181a53dae6df0e8509fd8ef857ac914baea4f84fb3"} Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.267841 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" event={"ID":"5aaea7f6-1850-4767-b8f5-9f23b460dfe9","Type":"ContainerStarted","Data":"2ab90f7ba8fd63c1890e7eedd99069bb6bd44b16f896db02ddbff5c2b8aaa40a"} Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.268729 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.270156 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" event={"ID":"749f27ae-0353-4986-9160-3c91eea3c225","Type":"ContainerStarted","Data":"5c2073ce6ffaab1d16a89622f472467dbde20951d94950dc99a99b1f275d4100"} Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.270232 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" 
event={"ID":"749f27ae-0353-4986-9160-3c91eea3c225","Type":"ContainerStarted","Data":"2804728cd2a61f8778e56d002dc4ea8febbaefa11c010d48cdab31c2f101b4d1"} Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.270407 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.273973 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.300389 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-67b9fdffdd-gjtf8" podStartSLOduration=2.300369454 podStartE2EDuration="2.300369454s" podCreationTimestamp="2025-09-29 19:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:50.292612463 +0000 UTC m=+761.177037583" watchObservedRunningTime="2025-09-29 19:20:50.300369454 +0000 UTC m=+761.184794564" Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.300983 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" Sep 29 19:20:50 crc kubenswrapper[4779]: I0929 19:20:50.387538 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-b46d75df9-r2w25" podStartSLOduration=3.387520587 podStartE2EDuration="3.387520587s" podCreationTimestamp="2025-09-29 19:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:20:50.366077563 +0000 UTC m=+761.250502663" watchObservedRunningTime="2025-09-29 19:20:50.387520587 +0000 UTC m=+761.271945687" Sep 29 19:20:52 crc kubenswrapper[4779]: I0929 19:20:52.930670 4779 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.090306 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6ccfd99bc8-cd86r" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.916357 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6"] Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.917561 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.920187 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jg6rc" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.921615 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.923026 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4lq9x"] Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.928559 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.930802 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.931586 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.932493 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6"] Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991022 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpcmn\" (UniqueName: \"kubernetes.io/projected/09b83c31-96ba-457f-9385-7a124ddbc54d-kube-api-access-mpcmn\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991074 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-sockets\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991188 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-conf\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-reloader\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991304 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09b83c31-96ba-457f-9385-7a124ddbc54d-cert\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991335 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-startup\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991423 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btplp\" (UniqueName: 
\"kubernetes.io/projected/be71c6d8-ba0b-46da-a558-f6c91cfddd59-kube-api-access-btplp\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.991439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.994502 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-2v7nb"] Sep 29 19:20:58 crc kubenswrapper[4779]: I0929 19:20:58.995571 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.015796 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.016053 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-wtcfs" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.016083 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.016810 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.021817 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-l5gr5"] Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.030372 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.034071 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-l5gr5"] Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.034618 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.092520 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-startup\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.092583 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metallb-excludel2\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.092707 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.092735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btplp\" (UniqueName: \"kubernetes.io/projected/be71c6d8-ba0b-46da-a558-f6c91cfddd59-kube-api-access-btplp\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093080 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093167 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-metrics-certs\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093230 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093277 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpcmn\" (UniqueName: \"kubernetes.io/projected/09b83c31-96ba-457f-9385-7a124ddbc54d-kube-api-access-mpcmn\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093671 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62wk7\" (UniqueName: \"kubernetes.io/projected/b16e724b-3b5e-46e6-bb8a-1aebd631d549-kube-api-access-62wk7\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093551 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-startup\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093756 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxv9q\" (UniqueName: \"kubernetes.io/projected/127fd6d6-e32b-4152-9f62-23b6b051318d-kube-api-access-vxv9q\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.093780 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-sockets\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094098 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094035 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-sockets\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094194 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-conf\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094544 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-reloader\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094845 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-cert\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094472 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-frr-conf\") pod \"frr-k8s-4lq9x\" (UID: 
\"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094791 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/be71c6d8-ba0b-46da-a558-f6c91cfddd59-reloader\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.094997 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09b83c31-96ba-457f-9385-7a124ddbc54d-cert\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.095040 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.095132 4779 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.095188 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs podName:be71c6d8-ba0b-46da-a558-f6c91cfddd59 nodeName:}" failed. No retries permitted until 2025-09-29 19:20:59.595170992 +0000 UTC m=+770.479596102 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs") pod "frr-k8s-4lq9x" (UID: "be71c6d8-ba0b-46da-a558-f6c91cfddd59") : secret "frr-k8s-certs-secret" not found Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.111257 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpcmn\" (UniqueName: \"kubernetes.io/projected/09b83c31-96ba-457f-9385-7a124ddbc54d-kube-api-access-mpcmn\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.113921 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09b83c31-96ba-457f-9385-7a124ddbc54d-cert\") pod \"frr-k8s-webhook-server-5478bdb765-6f4p6\" (UID: \"09b83c31-96ba-457f-9385-7a124ddbc54d\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.114716 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btplp\" (UniqueName: \"kubernetes.io/projected/be71c6d8-ba0b-46da-a558-f6c91cfddd59-kube-api-access-btplp\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196009 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc 
kubenswrapper[4779]: I0929 19:20:59.196064 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62wk7\" (UniqueName: \"kubernetes.io/projected/b16e724b-3b5e-46e6-bb8a-1aebd631d549-kube-api-access-62wk7\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196086 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxv9q\" (UniqueName: \"kubernetes.io/projected/127fd6d6-e32b-4152-9f62-23b6b051318d-kube-api-access-vxv9q\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196101 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196141 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-cert\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196178 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metallb-excludel2\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196204 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-metrics-certs\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.196807 4779 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.196889 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs podName:b16e724b-3b5e-46e6-bb8a-1aebd631d549 nodeName:}" failed. No retries permitted until 2025-09-29 19:20:59.696869661 +0000 UTC m=+770.581294871 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs") pod "speaker-2v7nb" (UID: "b16e724b-3b5e-46e6-bb8a-1aebd631d549") : secret "speaker-certs-secret" not found Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.196921 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metallb-excludel2\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.197023 4779 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.197102 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist podName:b16e724b-3b5e-46e6-bb8a-1aebd631d549 nodeName:}" failed. No retries permitted until 2025-09-29 19:20:59.697086707 +0000 UTC m=+770.581511807 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist") pod "speaker-2v7nb" (UID: "b16e724b-3b5e-46e6-bb8a-1aebd631d549") : secret "metallb-memberlist" not found Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.199991 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-cert\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.200124 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/127fd6d6-e32b-4152-9f62-23b6b051318d-metrics-certs\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.224215 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxv9q\" (UniqueName: \"kubernetes.io/projected/127fd6d6-e32b-4152-9f62-23b6b051318d-kube-api-access-vxv9q\") pod \"controller-5d688f5ffc-l5gr5\" (UID: \"127fd6d6-e32b-4152-9f62-23b6b051318d\") " pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.230226 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62wk7\" (UniqueName: \"kubernetes.io/projected/b16e724b-3b5e-46e6-bb8a-1aebd631d549-kube-api-access-62wk7\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.239365 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.348898 4779 util.go:30] "No sandbox for pod can be found. 
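The paired `secret.go:188` / `nestedpendingoperations.go:348` errors above show how the kubelet treats a Secret that does not exist yet: `MountVolume.SetUp` fails and the operation is re-queued with an exponentially growing delay. `durationBeforeRetry` is 500ms for these first failures and doubles to 1s when the `metallb-memberlist` secret is still missing on the next pass (below). A minimal sketch of that schedule, assuming the kubelet's usual exponential-backoff parameters (500ms initial delay, factor 2, capped around 2m2s); an illustration, not the kubelet's actual code:

```go
package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry sketches the kubelet's per-operation backoff:
// 500ms after the first failure, doubling per consecutive failure,
// capped (assumed ~2m2s, as in kubelet's exponentialbackoff package).
func durationBeforeRetry(failures int) time.Duration {
	const limit = 2*time.Minute + 2*time.Second
	d := 500 * time.Millisecond
	for i := 1; i < failures; i++ {
		d *= 2
		if d > limit {
			return limit
		}
	}
	return d
}

func main() {
	for n := 1; n <= 9; n++ {
		fmt.Printf("failure %d -> retry in %v\n", n, durationBeforeRetry(n))
	}
}
```

The backoff resets once an attempt succeeds, which is why no delay in this log ever grows past 1s: the missing secrets show up within a second or two.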
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.600538 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.604188 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be71c6d8-ba0b-46da-a558-f6c91cfddd59-metrics-certs\") pod \"frr-k8s-4lq9x\" (UID: \"be71c6d8-ba0b-46da-a558-f6c91cfddd59\") " pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.701138 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.701192 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.701358 4779 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 19:20:59 crc kubenswrapper[4779]: E0929 19:20:59.701403 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist podName:b16e724b-3b5e-46e6-bb8a-1aebd631d549 nodeName:}" failed. No retries permitted until 2025-09-29 19:21:00.701390056 +0000 UTC m=+771.585815156 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist") pod "speaker-2v7nb" (UID: "b16e724b-3b5e-46e6-bb8a-1aebd631d549") : secret "metallb-memberlist" not found Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.707381 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-metrics-certs\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.747121 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6"] Sep 29 19:20:59 crc kubenswrapper[4779]: W0929 19:20:59.751995 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09b83c31_96ba_457f_9385_7a124ddbc54d.slice/crio-9d40102b6e715d16d9ffacc12c309ca9d29683034e4b1549559bd283d78d2a85 WatchSource:0}: Error finding container 9d40102b6e715d16d9ffacc12c309ca9d29683034e4b1549559bd283d78d2a85: Status 404 returned error can't find the container with id 9d40102b6e715d16d9ffacc12c309ca9d29683034e4b1549559bd283d78d2a85 Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.820444 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-l5gr5"] Sep 29 19:20:59 crc kubenswrapper[4779]: W0929 19:20:59.824702 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod127fd6d6_e32b_4152_9f62_23b6b051318d.slice/crio-00d60fffda0e4cf213e880241c1a9594d8ee3ea759449877b88aff828570bc88 WatchSource:0}: Error finding container 00d60fffda0e4cf213e880241c1a9594d8ee3ea759449877b88aff828570bc88: Status 404 returned error can't find the container with id 00d60fffda0e4cf213e880241c1a9594d8ee3ea759449877b88aff828570bc88 Sep 29 19:20:59 crc kubenswrapper[4779]: I0929 19:20:59.852754 4779 util.go:30] "No sandbox for pod can be found. 
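No operator action was needed for these failures: the metallb-operator creates the certificate secrets moments after the pods are scheduled, the retried `metrics-certs` mounts succeed above, and the `metallb-memberlist` mount succeeds on the next retry below (19:21:00.739). The kubelet's retry loop is effectively polling for the Secret to appear; a hedged client-go equivalent of that wait (a hypothetical helper, assuming current client-go/apimachinery APIs):

```go
package example

import (
	"context"
	"time"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

// waitForSecret polls until the named Secret exists, mirroring what the
// kubelet's mount retries are effectively doing in the log above.
func waitForSecret(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 2*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			_, err := cs.CoreV1().Secrets(ns).Get(ctx, name, metav1.GetOptions{})
			switch {
			case err == nil:
				return true, nil // Secret exists; the mount can proceed
			case apierrors.IsNotFound(err):
				return false, nil // not yet created; keep polling
			default:
				return false, err // real API error; give up
			}
		})
}
```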
Need to start a new one" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.348356 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l5gr5" event={"ID":"127fd6d6-e32b-4152-9f62-23b6b051318d","Type":"ContainerStarted","Data":"d3a82630428e7bf57c74c669402e29bd666a6c2f6dc6e68945cead4e9589bc2d"} Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.348678 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l5gr5" event={"ID":"127fd6d6-e32b-4152-9f62-23b6b051318d","Type":"ContainerStarted","Data":"3af46d06c8d035067dd6d986da9cdb3ea47367138f4fc7690f517b098fc4c310"} Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.348699 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.348711 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l5gr5" event={"ID":"127fd6d6-e32b-4152-9f62-23b6b051318d","Type":"ContainerStarted","Data":"00d60fffda0e4cf213e880241c1a9594d8ee3ea759449877b88aff828570bc88"} Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.350107 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"eef62029370a5054e8d8bcff8b7d4d22d3e766871128a02d9bdd253672f793c8"} Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.351211 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" event={"ID":"09b83c31-96ba-457f-9385-7a124ddbc54d","Type":"ContainerStarted","Data":"9d40102b6e715d16d9ffacc12c309ca9d29683034e4b1549559bd283d78d2a85"} Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.371088 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-l5gr5" podStartSLOduration=2.371066097 podStartE2EDuration="2.371066097s" podCreationTimestamp="2025-09-29 19:20:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:21:00.365425384 +0000 UTC m=+771.249850474" watchObservedRunningTime="2025-09-29 19:21:00.371066097 +0000 UTC m=+771.255491227" Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.717031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.739973 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b16e724b-3b5e-46e6-bb8a-1aebd631d549-memberlist\") pod \"speaker-2v7nb\" (UID: \"b16e724b-3b5e-46e6-bb8a-1aebd631d549\") " pod="metallb-system/speaker-2v7nb" Sep 29 19:21:00 crc kubenswrapper[4779]: I0929 19:21:00.838151 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-2v7nb" Sep 29 19:21:00 crc kubenswrapper[4779]: W0929 19:21:00.862405 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb16e724b_3b5e_46e6_bb8a_1aebd631d549.slice/crio-6986e324e8b330de23b120e9a5ed91c4944dfbe88a0aa5ba5ca0c703139f2aa4 WatchSource:0}: Error finding container 6986e324e8b330de23b120e9a5ed91c4944dfbe88a0aa5ba5ca0c703139f2aa4: Status 404 returned error can't find the container with id 6986e324e8b330de23b120e9a5ed91c4944dfbe88a0aa5ba5ca0c703139f2aa4 Sep 29 19:21:01 crc kubenswrapper[4779]: I0929 19:21:01.381507 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2v7nb" event={"ID":"b16e724b-3b5e-46e6-bb8a-1aebd631d549","Type":"ContainerStarted","Data":"f8289412a6aa65f97d740b0d32ca184601dd415dde4e02868f59a4acab227de1"} Sep 29 19:21:01 crc kubenswrapper[4779]: I0929 19:21:01.381786 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2v7nb" event={"ID":"b16e724b-3b5e-46e6-bb8a-1aebd631d549","Type":"ContainerStarted","Data":"6986e324e8b330de23b120e9a5ed91c4944dfbe88a0aa5ba5ca0c703139f2aa4"} Sep 29 19:21:02 crc kubenswrapper[4779]: I0929 19:21:02.389736 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2v7nb" event={"ID":"b16e724b-3b5e-46e6-bb8a-1aebd631d549","Type":"ContainerStarted","Data":"feab3e0f7fb9abf57d13c451cf63c8a8e9be1c14010395928f3a36f99299de0f"} Sep 29 19:21:02 crc kubenswrapper[4779]: I0929 19:21:02.389871 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-2v7nb" Sep 29 19:21:02 crc kubenswrapper[4779]: I0929 19:21:02.409576 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-2v7nb" podStartSLOduration=4.409549303 podStartE2EDuration="4.409549303s" podCreationTimestamp="2025-09-29 19:20:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:21:02.408286499 +0000 UTC m=+773.292711639" watchObservedRunningTime="2025-09-29 19:21:02.409549303 +0000 UTC m=+773.293974403" Sep 29 19:21:07 crc kubenswrapper[4779]: I0929 19:21:07.430685 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" event={"ID":"09b83c31-96ba-457f-9385-7a124ddbc54d","Type":"ContainerStarted","Data":"2ff74b04bec1d23a92dbe77d7d42a4353b501c0a441d7b299118cbfc6b47709d"} Sep 29 19:21:07 crc kubenswrapper[4779]: I0929 19:21:07.431277 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:21:07 crc kubenswrapper[4779]: I0929 19:21:07.433491 4779 generic.go:334] "Generic (PLEG): container finished" podID="be71c6d8-ba0b-46da-a558-f6c91cfddd59" containerID="ad4cfa76d9a5a9eedaf9ddec7a831831803d5f7eddf8b7e71fe815a0047fb202" exitCode=0 Sep 29 19:21:07 crc kubenswrapper[4779]: I0929 19:21:07.433543 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerDied","Data":"ad4cfa76d9a5a9eedaf9ddec7a831831803d5f7eddf8b7e71fe815a0047fb202"} Sep 29 19:21:07 crc kubenswrapper[4779]: I0929 19:21:07.461708 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" podStartSLOduration=2.890029386 
podStartE2EDuration="9.461679121s" podCreationTimestamp="2025-09-29 19:20:58 +0000 UTC" firstStartedPulling="2025-09-29 19:20:59.753907476 +0000 UTC m=+770.638332576" lastFinishedPulling="2025-09-29 19:21:06.325557211 +0000 UTC m=+777.209982311" observedRunningTime="2025-09-29 19:21:07.453365725 +0000 UTC m=+778.337790915" watchObservedRunningTime="2025-09-29 19:21:07.461679121 +0000 UTC m=+778.346104251" Sep 29 19:21:08 crc kubenswrapper[4779]: I0929 19:21:08.441827 4779 generic.go:334] "Generic (PLEG): container finished" podID="be71c6d8-ba0b-46da-a558-f6c91cfddd59" containerID="94e0440ace5117055c11d49625c6b88c500330668ded9b5bc29fd7ff01ef8d05" exitCode=0 Sep 29 19:21:08 crc kubenswrapper[4779]: I0929 19:21:08.441939 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerDied","Data":"94e0440ace5117055c11d49625c6b88c500330668ded9b5bc29fd7ff01ef8d05"} Sep 29 19:21:09 crc kubenswrapper[4779]: I0929 19:21:09.451331 4779 generic.go:334] "Generic (PLEG): container finished" podID="be71c6d8-ba0b-46da-a558-f6c91cfddd59" containerID="36d65f7027ab912555582a10683601b446ae316a1e014b006e8b551cece39902" exitCode=0 Sep 29 19:21:09 crc kubenswrapper[4779]: I0929 19:21:09.451388 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerDied","Data":"36d65f7027ab912555582a10683601b446ae316a1e014b006e8b551cece39902"} Sep 29 19:21:10 crc kubenswrapper[4779]: I0929 19:21:10.464528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"f10c32450b6e0259b19959f5631266a5615b6c096c6596ffe8d9cdb9acabdbe7"} Sep 29 19:21:10 crc kubenswrapper[4779]: I0929 19:21:10.465414 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"344ea1367a606dd83aaecfae1d0371a130411a6f1dbeb26f4dd2cdae368e0838"} Sep 29 19:21:10 crc kubenswrapper[4779]: I0929 19:21:10.465440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"63f96ba5d3e0a0661ba6bea6b07fdabfe69617b4ae201fdcc86473fb2a45e546"} Sep 29 19:21:10 crc kubenswrapper[4779]: I0929 19:21:10.465459 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"865b4288eeabda504a0571bb0d5ebb543fe0537f12ce9bff58884bc82f51b19d"} Sep 29 19:21:10 crc kubenswrapper[4779]: I0929 19:21:10.465478 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"04a3c7c62883d1c5e4b6bc4f4b39148b0453353778c634b7644292cac37d8b42"} Sep 29 19:21:11 crc kubenswrapper[4779]: I0929 19:21:11.480030 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lq9x" event={"ID":"be71c6d8-ba0b-46da-a558-f6c91cfddd59","Type":"ContainerStarted","Data":"687ffde9588a9992cf3c27bb28172a55242fd6f0532d28704e64d038b41853e6"} Sep 29 19:21:11 crc kubenswrapper[4779]: I0929 19:21:11.480245 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:21:11 crc 
kubenswrapper[4779]: I0929 19:21:11.507720 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4lq9x" podStartSLOduration=7.166935861 podStartE2EDuration="13.50768938s" podCreationTimestamp="2025-09-29 19:20:58 +0000 UTC" firstStartedPulling="2025-09-29 19:21:00.010670816 +0000 UTC m=+770.895095926" lastFinishedPulling="2025-09-29 19:21:06.351424345 +0000 UTC m=+777.235849445" observedRunningTime="2025-09-29 19:21:11.50401875 +0000 UTC m=+782.388443900" watchObservedRunningTime="2025-09-29 19:21:11.50768938 +0000 UTC m=+782.392114490" Sep 29 19:21:13 crc kubenswrapper[4779]: I0929 19:21:13.784815 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:21:13 crc kubenswrapper[4779]: I0929 19:21:13.785244 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:21:14 crc kubenswrapper[4779]: I0929 19:21:14.854013 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:21:14 crc kubenswrapper[4779]: I0929 19:21:14.924215 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:21:19 crc kubenswrapper[4779]: I0929 19:21:19.246225 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-6f4p6" Sep 29 19:21:19 crc kubenswrapper[4779]: I0929 19:21:19.356417 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-l5gr5" Sep 29 19:21:19 crc kubenswrapper[4779]: I0929 19:21:19.858017 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4lq9x" Sep 29 19:21:20 crc kubenswrapper[4779]: I0929 19:21:20.843607 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-2v7nb" Sep 29 19:21:23 crc kubenswrapper[4779]: I0929 19:21:23.914598 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:23 crc kubenswrapper[4779]: I0929 19:21:23.916278 4779 util.go:30] "No sandbox for pod can be found. 
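The `pod_startup_latency_tracker` lines reward a closer look: `podStartE2EDuration` is creation-to-observed-running, and `podStartSLOduration` evidently excludes the image-pull window (an inference from the numbers here, consistent with the upstream pod-startup SLI, which discounts image pulling). For `frr-k8s-webhook-server` above, subtracting the pull window from the end-to-end time reproduces the reported value to the last digit:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the frr-k8s-webhook-server startup line above
	// (reformatted from "+0000 UTC" to RFC 3339 for time.Parse).
	pullStart, _ := time.Parse(time.RFC3339Nano, "2025-09-29T19:20:59.753907476Z")
	pullEnd, _ := time.Parse(time.RFC3339Nano, "2025-09-29T19:21:06.325557211Z")
	e2e := 9.461679121 // podStartE2EDuration, in seconds

	slo := e2e - pullEnd.Sub(pullStart).Seconds()
	fmt.Printf("podStartSLOduration ≈ %.9f s\n", slo) // prints 2.890029386
}
```

For pods whose images were already present, `firstStartedPulling` is the zero time and the two durations coincide, as with the controller (2.371s) and speaker (4.410s) pods.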
Need to start a new one" pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:23 crc kubenswrapper[4779]: I0929 19:21:23.920231 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 19:21:23 crc kubenswrapper[4779]: I0929 19:21:23.922623 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 19:21:23 crc kubenswrapper[4779]: I0929 19:21:23.936118 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:24 crc kubenswrapper[4779]: I0929 19:21:24.061198 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcjjw\" (UniqueName: \"kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw\") pod \"openstack-operator-index-wgz8r\" (UID: \"d54511bc-7ba7-41e6-b726-a2ed0ec278e1\") " pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:24 crc kubenswrapper[4779]: I0929 19:21:24.162357 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcjjw\" (UniqueName: \"kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw\") pod \"openstack-operator-index-wgz8r\" (UID: \"d54511bc-7ba7-41e6-b726-a2ed0ec278e1\") " pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:24 crc kubenswrapper[4779]: I0929 19:21:24.182753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcjjw\" (UniqueName: \"kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw\") pod \"openstack-operator-index-wgz8r\" (UID: \"d54511bc-7ba7-41e6-b726-a2ed0ec278e1\") " pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:24 crc kubenswrapper[4779]: I0929 19:21:24.252666 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:24 crc kubenswrapper[4779]: I0929 19:21:24.732186 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:24 crc kubenswrapper[4779]: W0929 19:21:24.738161 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd54511bc_7ba7_41e6_b726_a2ed0ec278e1.slice/crio-72ac168e2129126a01eec4c54458632d45897b7cb66b143cc09c7cc12e6ae625 WatchSource:0}: Error finding container 72ac168e2129126a01eec4c54458632d45897b7cb66b143cc09c7cc12e6ae625: Status 404 returned error can't find the container with id 72ac168e2129126a01eec4c54458632d45897b7cb66b143cc09c7cc12e6ae625 Sep 29 19:21:25 crc kubenswrapper[4779]: I0929 19:21:25.589360 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wgz8r" event={"ID":"d54511bc-7ba7-41e6-b726-a2ed0ec278e1","Type":"ContainerStarted","Data":"72ac168e2129126a01eec4c54458632d45897b7cb66b143cc09c7cc12e6ae625"} Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.281794 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.602006 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wgz8r" event={"ID":"d54511bc-7ba7-41e6-b726-a2ed0ec278e1","Type":"ContainerStarted","Data":"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a"} Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.602133 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-wgz8r" podUID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" containerName="registry-server" containerID="cri-o://a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a" gracePeriod=2 Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.618804 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wgz8r" podStartSLOduration=1.975849707 podStartE2EDuration="4.618786078s" podCreationTimestamp="2025-09-29 19:21:23 +0000 UTC" firstStartedPulling="2025-09-29 19:21:24.741550749 +0000 UTC m=+795.625975859" lastFinishedPulling="2025-09-29 19:21:27.38448713 +0000 UTC m=+798.268912230" observedRunningTime="2025-09-29 19:21:27.615239341 +0000 UTC m=+798.499664461" watchObservedRunningTime="2025-09-29 19:21:27.618786078 +0000 UTC m=+798.503211178" Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.908137 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-pwzpf"] Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.909354 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.912810 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pwzpf"] Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.913111 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-8rggp" Sep 29 19:21:27 crc kubenswrapper[4779]: I0929 19:21:27.917033 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nknjt\" (UniqueName: \"kubernetes.io/projected/96bf8594-ea42-4a66-baa7-39679ec2aa5d-kube-api-access-nknjt\") pod \"openstack-operator-index-pwzpf\" (UID: \"96bf8594-ea42-4a66-baa7-39679ec2aa5d\") " pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.018347 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nknjt\" (UniqueName: \"kubernetes.io/projected/96bf8594-ea42-4a66-baa7-39679ec2aa5d-kube-api-access-nknjt\") pod \"openstack-operator-index-pwzpf\" (UID: \"96bf8594-ea42-4a66-baa7-39679ec2aa5d\") " pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.029447 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.045734 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nknjt\" (UniqueName: \"kubernetes.io/projected/96bf8594-ea42-4a66-baa7-39679ec2aa5d-kube-api-access-nknjt\") pod \"openstack-operator-index-pwzpf\" (UID: \"96bf8594-ea42-4a66-baa7-39679ec2aa5d\") " pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.220838 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcjjw\" (UniqueName: \"kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw\") pod \"d54511bc-7ba7-41e6-b726-a2ed0ec278e1\" (UID: \"d54511bc-7ba7-41e6-b726-a2ed0ec278e1\") " Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.226160 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw" (OuterVolumeSpecName: "kube-api-access-kcjjw") pod "d54511bc-7ba7-41e6-b726-a2ed0ec278e1" (UID: "d54511bc-7ba7-41e6-b726-a2ed0ec278e1"). InnerVolumeSpecName "kube-api-access-kcjjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.239827 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.322574 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcjjw\" (UniqueName: \"kubernetes.io/projected/d54511bc-7ba7-41e6-b726-a2ed0ec278e1-kube-api-access-kcjjw\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.614607 4779 generic.go:334] "Generic (PLEG): container finished" podID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" containerID="a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a" exitCode=0 Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.614724 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wgz8r" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.614766 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wgz8r" event={"ID":"d54511bc-7ba7-41e6-b726-a2ed0ec278e1","Type":"ContainerDied","Data":"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a"} Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.614858 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wgz8r" event={"ID":"d54511bc-7ba7-41e6-b726-a2ed0ec278e1","Type":"ContainerDied","Data":"72ac168e2129126a01eec4c54458632d45897b7cb66b143cc09c7cc12e6ae625"} Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.614895 4779 scope.go:117] "RemoveContainer" containerID="a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.643658 4779 scope.go:117] "RemoveContainer" containerID="a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a" Sep 29 19:21:28 crc kubenswrapper[4779]: E0929 19:21:28.645052 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a\": container with ID starting with a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a not found: ID does not exist" containerID="a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.645141 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a"} err="failed to get container status \"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a\": rpc error: code = NotFound desc = could not find container \"a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a\": container with ID starting with a0fb6549fc3658942ea21b71d6744527090cfd8ee653a2a699b2647bf9c7235a not found: ID does not exist" Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.661471 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.668176 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-wgz8r"] Sep 29 19:21:28 crc kubenswrapper[4779]: I0929 19:21:28.750977 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pwzpf"] Sep 29 19:21:29 crc kubenswrapper[4779]: I0929 19:21:29.625959 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-index-pwzpf" event={"ID":"96bf8594-ea42-4a66-baa7-39679ec2aa5d","Type":"ContainerStarted","Data":"421d0f68c9b01430e17cb359e1d1bd90dba3388d5f50a167888c42e0b2dd1012"} Sep 29 19:21:29 crc kubenswrapper[4779]: I0929 19:21:29.626031 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pwzpf" event={"ID":"96bf8594-ea42-4a66-baa7-39679ec2aa5d","Type":"ContainerStarted","Data":"f8cb764553309e6abbb1bababfd1e61718433753eaafc37cb3a511ff799cef0e"} Sep 29 19:21:29 crc kubenswrapper[4779]: I0929 19:21:29.657600 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-pwzpf" podStartSLOduration=2.591371819 podStartE2EDuration="2.657568642s" podCreationTimestamp="2025-09-29 19:21:27 +0000 UTC" firstStartedPulling="2025-09-29 19:21:28.765397443 +0000 UTC m=+799.649822573" lastFinishedPulling="2025-09-29 19:21:28.831594256 +0000 UTC m=+799.716019396" observedRunningTime="2025-09-29 19:21:29.647545689 +0000 UTC m=+800.531970819" watchObservedRunningTime="2025-09-29 19:21:29.657568642 +0000 UTC m=+800.541993772" Sep 29 19:21:29 crc kubenswrapper[4779]: I0929 19:21:29.781424 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" path="/var/lib/kubelet/pods/d54511bc-7ba7-41e6-b726-a2ed0ec278e1/volumes" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.497226 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:35 crc kubenswrapper[4779]: E0929 19:21:35.498162 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" containerName="registry-server" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.498182 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" containerName="registry-server" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.498406 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d54511bc-7ba7-41e6-b726-a2ed0ec278e1" containerName="registry-server" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.499734 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.516296 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.544028 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4j4q\" (UniqueName: \"kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.544256 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.544369 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.645754 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.645809 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.645850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4j4q\" (UniqueName: \"kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.647398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.647472 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.671778 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d4j4q\" (UniqueName: \"kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q\") pod \"redhat-marketplace-hskt5\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:35 crc kubenswrapper[4779]: I0929 19:21:35.820614 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:36 crc kubenswrapper[4779]: I0929 19:21:36.283897 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:36 crc kubenswrapper[4779]: W0929 19:21:36.284452 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ea5edcd_7ee4_4567_9747_0452a52a8211.slice/crio-c0e9c07dea0c2efb56e0668324b200ee31b1e6089f4ea05dadfcf980308d3c93 WatchSource:0}: Error finding container c0e9c07dea0c2efb56e0668324b200ee31b1e6089f4ea05dadfcf980308d3c93: Status 404 returned error can't find the container with id c0e9c07dea0c2efb56e0668324b200ee31b1e6089f4ea05dadfcf980308d3c93 Sep 29 19:21:36 crc kubenswrapper[4779]: I0929 19:21:36.679977 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerStarted","Data":"c0e9c07dea0c2efb56e0668324b200ee31b1e6089f4ea05dadfcf980308d3c93"} Sep 29 19:21:37 crc kubenswrapper[4779]: I0929 19:21:37.691087 4779 generic.go:334] "Generic (PLEG): container finished" podID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerID="601fe8ffecd03246100a8ba32def6d92f685a620eea08e25d9a01a4a1ffd6148" exitCode=0 Sep 29 19:21:37 crc kubenswrapper[4779]: I0929 19:21:37.691440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerDied","Data":"601fe8ffecd03246100a8ba32def6d92f685a620eea08e25d9a01a4a1ffd6148"} Sep 29 19:21:38 crc kubenswrapper[4779]: I0929 19:21:38.240625 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:38 crc kubenswrapper[4779]: I0929 19:21:38.240707 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:38 crc kubenswrapper[4779]: I0929 19:21:38.288191 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:38 crc kubenswrapper[4779]: I0929 19:21:38.740385 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-pwzpf" Sep 29 19:21:39 crc kubenswrapper[4779]: I0929 19:21:39.718751 4779 generic.go:334] "Generic (PLEG): container finished" podID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerID="3b06f8a382a2c31f14f350270b4065360298b89113da0a97b220aeb616ba4138" exitCode=0 Sep 29 19:21:39 crc kubenswrapper[4779]: I0929 19:21:39.718845 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerDied","Data":"3b06f8a382a2c31f14f350270b4065360298b89113da0a97b220aeb616ba4138"} Sep 29 19:21:40 crc kubenswrapper[4779]: I0929 19:21:40.732554 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerStarted","Data":"68cf25ce7a9b4e5e81ffc0e0ef16e1832eeeb519224742228e88d276437201c4"} Sep 29 19:21:40 crc kubenswrapper[4779]: I0929 19:21:40.757195 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hskt5" podStartSLOduration=3.2987022169999998 podStartE2EDuration="5.757175586s" podCreationTimestamp="2025-09-29 19:21:35 +0000 UTC" firstStartedPulling="2025-09-29 19:21:37.694247062 +0000 UTC m=+808.578672202" lastFinishedPulling="2025-09-29 19:21:40.152720461 +0000 UTC m=+811.037145571" observedRunningTime="2025-09-29 19:21:40.755510991 +0000 UTC m=+811.639936121" watchObservedRunningTime="2025-09-29 19:21:40.757175586 +0000 UTC m=+811.641600686" Sep 29 19:21:43 crc kubenswrapper[4779]: I0929 19:21:43.785636 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:21:43 crc kubenswrapper[4779]: I0929 19:21:43.786049 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.753034 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh"] Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.756203 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.758524 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5xqz8" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.771104 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh"] Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.784830 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6fcq\" (UniqueName: \"kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.784953 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.784999 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.885844 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.885886 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.885956 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6fcq\" (UniqueName: \"kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.886625 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.886761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:44 crc kubenswrapper[4779]: I0929 19:21:44.921065 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6fcq\" (UniqueName: \"kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq\") pod \"b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.083755 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.554686 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh"] Sep 29 19:21:45 crc kubenswrapper[4779]: W0929 19:21:45.563511 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode83c2f7a_1165_4d7b_ba2f_ec47f6291cfd.slice/crio-fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579 WatchSource:0}: Error finding container fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579: Status 404 returned error can't find the container with id fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579 Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.781392 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerStarted","Data":"7c9007f8fb631d51120a9742e0be272b42f5c8b09fb68d6fa2ac526e60a5051e"} Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.781737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerStarted","Data":"fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579"} Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.821129 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.821194 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:45 crc kubenswrapper[4779]: I0929 19:21:45.890114 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:46 crc kubenswrapper[4779]: I0929 19:21:46.790852 4779 generic.go:334] 
"Generic (PLEG): container finished" podID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerID="7c9007f8fb631d51120a9742e0be272b42f5c8b09fb68d6fa2ac526e60a5051e" exitCode=0 Sep 29 19:21:46 crc kubenswrapper[4779]: I0929 19:21:46.791075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerDied","Data":"7c9007f8fb631d51120a9742e0be272b42f5c8b09fb68d6fa2ac526e60a5051e"} Sep 29 19:21:46 crc kubenswrapper[4779]: I0929 19:21:46.865445 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:47 crc kubenswrapper[4779]: I0929 19:21:47.802086 4779 generic.go:334] "Generic (PLEG): container finished" podID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerID="667fe428d50d0ef43ad6e54c6cc1442339380ec202bca54da9221ca11ea310bd" exitCode=0 Sep 29 19:21:47 crc kubenswrapper[4779]: I0929 19:21:47.802238 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerDied","Data":"667fe428d50d0ef43ad6e54c6cc1442339380ec202bca54da9221ca11ea310bd"} Sep 29 19:21:48 crc kubenswrapper[4779]: I0929 19:21:48.811269 4779 generic.go:334] "Generic (PLEG): container finished" podID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerID="a2ce57f90060505a06ce7828ffd42d9ba6902fefeb075ce5e0f6c1b88aaf7e44" exitCode=0 Sep 29 19:21:48 crc kubenswrapper[4779]: I0929 19:21:48.811345 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerDied","Data":"a2ce57f90060505a06ce7828ffd42d9ba6902fefeb075ce5e0f6c1b88aaf7e44"} Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.484471 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.484774 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hskt5" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="registry-server" containerID="cri-o://68cf25ce7a9b4e5e81ffc0e0ef16e1832eeeb519224742228e88d276437201c4" gracePeriod=2 Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.820706 4779 generic.go:334] "Generic (PLEG): container finished" podID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerID="68cf25ce7a9b4e5e81ffc0e0ef16e1832eeeb519224742228e88d276437201c4" exitCode=0 Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.820959 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerDied","Data":"68cf25ce7a9b4e5e81ffc0e0ef16e1832eeeb519224742228e88d276437201c4"} Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.887227 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.961619 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities\") pod \"1ea5edcd-7ee4-4567-9747-0452a52a8211\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.961796 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content\") pod \"1ea5edcd-7ee4-4567-9747-0452a52a8211\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.961834 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4j4q\" (UniqueName: \"kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q\") pod \"1ea5edcd-7ee4-4567-9747-0452a52a8211\" (UID: \"1ea5edcd-7ee4-4567-9747-0452a52a8211\") " Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.963260 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities" (OuterVolumeSpecName: "utilities") pod "1ea5edcd-7ee4-4567-9747-0452a52a8211" (UID: "1ea5edcd-7ee4-4567-9747-0452a52a8211"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.969374 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q" (OuterVolumeSpecName: "kube-api-access-d4j4q") pod "1ea5edcd-7ee4-4567-9747-0452a52a8211" (UID: "1ea5edcd-7ee4-4567-9747-0452a52a8211"). InnerVolumeSpecName "kube-api-access-d4j4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:49 crc kubenswrapper[4779]: I0929 19:21:49.979721 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ea5edcd-7ee4-4567-9747-0452a52a8211" (UID: "1ea5edcd-7ee4-4567-9747-0452a52a8211"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.063653 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.063702 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4j4q\" (UniqueName: \"kubernetes.io/projected/1ea5edcd-7ee4-4567-9747-0452a52a8211-kube-api-access-d4j4q\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.063714 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ea5edcd-7ee4-4567-9747-0452a52a8211-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.085263 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.164638 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6fcq\" (UniqueName: \"kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq\") pod \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.164715 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util\") pod \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.164742 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle\") pod \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\" (UID: \"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd\") " Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.165734 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle" (OuterVolumeSpecName: "bundle") pod "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" (UID: "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.169251 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq" (OuterVolumeSpecName: "kube-api-access-h6fcq") pod "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" (UID: "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd"). InnerVolumeSpecName "kube-api-access-h6fcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.183805 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util" (OuterVolumeSpecName: "util") pod "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" (UID: "e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.266701 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6fcq\" (UniqueName: \"kubernetes.io/projected/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-kube-api-access-h6fcq\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.266736 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-util\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.266747 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.832940 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hskt5" event={"ID":"1ea5edcd-7ee4-4567-9747-0452a52a8211","Type":"ContainerDied","Data":"c0e9c07dea0c2efb56e0668324b200ee31b1e6089f4ea05dadfcf980308d3c93"} Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.833010 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hskt5" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.833287 4779 scope.go:117] "RemoveContainer" containerID="68cf25ce7a9b4e5e81ffc0e0ef16e1832eeeb519224742228e88d276437201c4" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.856151 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" event={"ID":"e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd","Type":"ContainerDied","Data":"fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579"} Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.856678 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd5128c643acf96e593316ae19dcbea1687ba5fa4e32a0dba9be33ab1a0f7579" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.857533 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.863972 4779 scope.go:117] "RemoveContainer" containerID="3b06f8a382a2c31f14f350270b4065360298b89113da0a97b220aeb616ba4138" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.902201 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.903545 4779 scope.go:117] "RemoveContainer" containerID="601fe8ffecd03246100a8ba32def6d92f685a620eea08e25d9a01a4a1ffd6148" Sep 29 19:21:50 crc kubenswrapper[4779]: I0929 19:21:50.910118 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hskt5"] Sep 29 19:21:51 crc kubenswrapper[4779]: I0929 19:21:51.775287 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" path="/var/lib/kubelet/pods/1ea5edcd-7ee4-4567-9747-0452a52a8211/volumes" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289073 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls"] Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289642 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="extract-utilities" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289655 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="extract-utilities" Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289666 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="util" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289671 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="util" Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289684 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="extract-content" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289689 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="extract-content" Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289699 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="registry-server" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289704 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="registry-server" Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289712 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="extract" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289718 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="extract" Sep 29 19:21:55 crc kubenswrapper[4779]: E0929 19:21:55.289730 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="pull" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289735 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="pull" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289835 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd" containerName="extract" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.289852 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea5edcd-7ee4-4567-9747-0452a52a8211" containerName="registry-server" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.290496 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.292563 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-4xvzt" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.322577 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls"] Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.342372 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbbhd\" (UniqueName: \"kubernetes.io/projected/8988ff92-ee96-4702-875b-a311c8d08a7b-kube-api-access-sbbhd\") pod \"openstack-operator-controller-operator-764cfd59bc-2w6ls\" (UID: \"8988ff92-ee96-4702-875b-a311c8d08a7b\") " pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.443509 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbbhd\" (UniqueName: \"kubernetes.io/projected/8988ff92-ee96-4702-875b-a311c8d08a7b-kube-api-access-sbbhd\") pod \"openstack-operator-controller-operator-764cfd59bc-2w6ls\" (UID: \"8988ff92-ee96-4702-875b-a311c8d08a7b\") " pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.462083 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbbhd\" (UniqueName: \"kubernetes.io/projected/8988ff92-ee96-4702-875b-a311c8d08a7b-kube-api-access-sbbhd\") pod \"openstack-operator-controller-operator-764cfd59bc-2w6ls\" (UID: \"8988ff92-ee96-4702-875b-a311c8d08a7b\") " pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.610366 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.830610 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls"] Sep 29 19:21:55 crc kubenswrapper[4779]: I0929 19:21:55.904013 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" event={"ID":"8988ff92-ee96-4702-875b-a311c8d08a7b","Type":"ContainerStarted","Data":"fb00d268e14f97f0bb64b1afacebe79cca359e43acb4584a493ccb4841c81cdf"} Sep 29 19:21:57 crc kubenswrapper[4779]: I0929 19:21:57.888729 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:21:57 crc kubenswrapper[4779]: I0929 19:21:57.890045 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:57 crc kubenswrapper[4779]: I0929 19:21:57.903995 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.013404 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcbgr\" (UniqueName: \"kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.013475 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.013533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.114564 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcbgr\" (UniqueName: \"kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.114895 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.114925 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities\") pod \"certified-operators-p4hvc\" (UID: 
\"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.115462 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.115501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.133366 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcbgr\" (UniqueName: \"kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr\") pod \"certified-operators-p4hvc\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:58 crc kubenswrapper[4779]: I0929 19:21:58.232681 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:21:59 crc kubenswrapper[4779]: I0929 19:21:59.958160 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" event={"ID":"8988ff92-ee96-4702-875b-a311c8d08a7b","Type":"ContainerStarted","Data":"2caf2b979ca3fe53bcf2072ce03d34a1ea54384d117848665eb6a029f9e136c4"} Sep 29 19:22:00 crc kubenswrapper[4779]: I0929 19:22:00.087818 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:22:00 crc kubenswrapper[4779]: W0929 19:22:00.096125 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3c4c1a8_3a54_4a2e_8fcf_cd4f44e07350.slice/crio-05a943cac359a4247ea0287f7565c9c41fedd1d2b79e92f7cc99bdf70d78b295 WatchSource:0}: Error finding container 05a943cac359a4247ea0287f7565c9c41fedd1d2b79e92f7cc99bdf70d78b295: Status 404 returned error can't find the container with id 05a943cac359a4247ea0287f7565c9c41fedd1d2b79e92f7cc99bdf70d78b295 Sep 29 19:22:00 crc kubenswrapper[4779]: I0929 19:22:00.966975 4779 generic.go:334] "Generic (PLEG): container finished" podID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerID="e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16" exitCode=0 Sep 29 19:22:00 crc kubenswrapper[4779]: I0929 19:22:00.967015 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerDied","Data":"e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16"} Sep 29 19:22:00 crc kubenswrapper[4779]: I0929 19:22:00.967040 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerStarted","Data":"05a943cac359a4247ea0287f7565c9c41fedd1d2b79e92f7cc99bdf70d78b295"} Sep 29 19:22:02 crc kubenswrapper[4779]: I0929 19:22:02.988541 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerID="6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a" exitCode=0 Sep 29 19:22:02 crc kubenswrapper[4779]: I0929 19:22:02.988660 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerDied","Data":"6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a"} Sep 29 19:22:02 crc kubenswrapper[4779]: I0929 19:22:02.992269 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" event={"ID":"8988ff92-ee96-4702-875b-a311c8d08a7b","Type":"ContainerStarted","Data":"4e07d6db27b59d8025468e9b7fb296cffd33f86d897cd35b6ab0fb4f33af75b1"} Sep 29 19:22:02 crc kubenswrapper[4779]: I0929 19:22:02.993048 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:22:03 crc kubenswrapper[4779]: I0929 19:22:03.053938 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" podStartSLOduration=1.363971024 podStartE2EDuration="8.05391473s" podCreationTimestamp="2025-09-29 19:21:55 +0000 UTC" firstStartedPulling="2025-09-29 19:21:55.836999338 +0000 UTC m=+826.721424438" lastFinishedPulling="2025-09-29 19:22:02.526943044 +0000 UTC m=+833.411368144" observedRunningTime="2025-09-29 19:22:03.042412267 +0000 UTC m=+833.926837407" watchObservedRunningTime="2025-09-29 19:22:03.05391473 +0000 UTC m=+833.938339820" Sep 29 19:22:04 crc kubenswrapper[4779]: I0929 19:22:04.003073 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerStarted","Data":"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe"} Sep 29 19:22:04 crc kubenswrapper[4779]: I0929 19:22:04.007359 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-764cfd59bc-2w6ls" Sep 29 19:22:04 crc kubenswrapper[4779]: I0929 19:22:04.033884 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p4hvc" podStartSLOduration=4.594117998 podStartE2EDuration="7.033855047s" podCreationTimestamp="2025-09-29 19:21:57 +0000 UTC" firstStartedPulling="2025-09-29 19:22:01.03953915 +0000 UTC m=+831.923964250" lastFinishedPulling="2025-09-29 19:22:03.479276159 +0000 UTC m=+834.363701299" observedRunningTime="2025-09-29 19:22:04.02661311 +0000 UTC m=+834.911038290" watchObservedRunningTime="2025-09-29 19:22:04.033855047 +0000 UTC m=+834.918280187" Sep 29 19:22:08 crc kubenswrapper[4779]: I0929 19:22:08.233539 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:08 crc kubenswrapper[4779]: I0929 19:22:08.233883 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:08 crc kubenswrapper[4779]: I0929 19:22:08.321939 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:09 crc kubenswrapper[4779]: I0929 19:22:09.101577 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:09 crc kubenswrapper[4779]: I0929 19:22:09.281398 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.047611 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p4hvc" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="registry-server" containerID="cri-o://c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe" gracePeriod=2 Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.486126 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.590725 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content\") pod \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.590885 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcbgr\" (UniqueName: \"kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr\") pod \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.590929 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities\") pod \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\" (UID: \"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350\") " Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.591934 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities" (OuterVolumeSpecName: "utilities") pod "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" (UID: "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.596278 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr" (OuterVolumeSpecName: "kube-api-access-bcbgr") pod "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" (UID: "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350"). InnerVolumeSpecName "kube-api-access-bcbgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.637837 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" (UID: "e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.692630 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.692713 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcbgr\" (UniqueName: \"kubernetes.io/projected/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-kube-api-access-bcbgr\") on node \"crc\" DevicePath \"\"" Sep 29 19:22:11 crc kubenswrapper[4779]: I0929 19:22:11.692743 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.056084 4779 generic.go:334] "Generic (PLEG): container finished" podID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerID="c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe" exitCode=0 Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.056123 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerDied","Data":"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe"} Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.056903 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p4hvc" event={"ID":"e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350","Type":"ContainerDied","Data":"05a943cac359a4247ea0287f7565c9c41fedd1d2b79e92f7cc99bdf70d78b295"} Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.056959 4779 scope.go:117] "RemoveContainer" containerID="c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.056197 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p4hvc" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.088014 4779 scope.go:117] "RemoveContainer" containerID="6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.089692 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.094211 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p4hvc"] Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.109767 4779 scope.go:117] "RemoveContainer" containerID="e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.134738 4779 scope.go:117] "RemoveContainer" containerID="c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe" Sep 29 19:22:12 crc kubenswrapper[4779]: E0929 19:22:12.135405 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe\": container with ID starting with c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe not found: ID does not exist" containerID="c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.135445 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe"} err="failed to get container status \"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe\": rpc error: code = NotFound desc = could not find container \"c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe\": container with ID starting with c0ac20fa6c9f6491a27db7662435673c696e85044a459ecc344bf0e347d161fe not found: ID does not exist" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.135469 4779 scope.go:117] "RemoveContainer" containerID="6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a" Sep 29 19:22:12 crc kubenswrapper[4779]: E0929 19:22:12.135735 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a\": container with ID starting with 6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a not found: ID does not exist" containerID="6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.135769 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a"} err="failed to get container status \"6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a\": rpc error: code = NotFound desc = could not find container \"6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a\": container with ID starting with 6762d966f83a59f4c28f6031aa1ae212cb5180943e9c335ad2611fd3ff027f0a not found: ID does not exist" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.135789 4779 scope.go:117] "RemoveContainer" containerID="e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16" Sep 29 19:22:12 crc kubenswrapper[4779]: E0929 19:22:12.136041 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16\": container with ID starting with e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16 not found: ID does not exist" containerID="e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16" Sep 29 19:22:12 crc kubenswrapper[4779]: I0929 19:22:12.136060 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16"} err="failed to get container status \"e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16\": rpc error: code = NotFound desc = could not find container \"e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16\": container with ID starting with e7c1c12ee036ed2f065f7fddb6a089dc24c5c748a607047b803a6e268827ef16 not found: ID does not exist" Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.777759 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" path="/var/lib/kubelet/pods/e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350/volumes" Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.784907 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.784961 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.785007 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.785604 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:22:13 crc kubenswrapper[4779]: I0929 19:22:13.785663 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae" gracePeriod=600 Sep 29 19:22:14 crc kubenswrapper[4779]: I0929 19:22:14.072747 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae" exitCode=0 Sep 29 19:22:14 crc kubenswrapper[4779]: I0929 19:22:14.072788 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" 
event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae"} Sep 29 19:22:14 crc kubenswrapper[4779]: I0929 19:22:14.073063 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460"} Sep 29 19:22:14 crc kubenswrapper[4779]: I0929 19:22:14.073085 4779 scope.go:117] "RemoveContainer" containerID="b3ba67db5fe746a0ad4fd3ae08a4d76a8d4a2ecb2a07aee398af84195906c334" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.444977 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6"] Sep 29 19:22:20 crc kubenswrapper[4779]: E0929 19:22:20.445637 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="registry-server" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.445648 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="registry-server" Sep 29 19:22:20 crc kubenswrapper[4779]: E0929 19:22:20.445659 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="extract-content" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.445665 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="extract-content" Sep 29 19:22:20 crc kubenswrapper[4779]: E0929 19:22:20.445675 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="extract-utilities" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.445681 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="extract-utilities" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.445783 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c4c1a8-3a54-4a2e-8fcf-cd4f44e07350" containerName="registry-server" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.446306 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.454008 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-x2ljz" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.469948 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.476247 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.476455 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.480105 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-grs6v" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.509295 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.510633 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.512546 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6wmwr" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.532240 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.533158 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.535113 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-k5xfz" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.535600 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.536347 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.540465 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-hpjtz" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.546548 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.557730 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.566124 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.569074 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.593252 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.594281 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.601935 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-j5m2t" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.605155 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.606197 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.608291 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pks5\" (UniqueName: \"kubernetes.io/projected/0e2748e0-ee66-45a1-b018-0798ad0ef293-kube-api-access-2pks5\") pod \"designate-operator-controller-manager-84f4f7b77b-kmgvf\" (UID: \"0e2748e0-ee66-45a1-b018-0798ad0ef293\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.608360 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcm4w\" (UniqueName: \"kubernetes.io/projected/5d20194a-c49a-4da1-a081-23d5c3bde845-kube-api-access-zcm4w\") pod \"cinder-operator-controller-manager-644bddb6d8-txbkn\" (UID: \"5d20194a-c49a-4da1-a081-23d5c3bde845\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.608390 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk77x\" (UniqueName: \"kubernetes.io/projected/7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997-kube-api-access-mk77x\") pod \"barbican-operator-controller-manager-6ff8b75857-l2cw6\" (UID: \"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.608473 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.614962 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.617586 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-29wxc" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.622457 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.634858 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-46trm"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.635684 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.638377 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-9fstv" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.654208 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-46trm"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.672274 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.673199 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.685988 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-lqclw" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.696007 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.697047 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.703545 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-qmrzk" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.705367 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.709971 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdnvw\" (UniqueName: \"kubernetes.io/projected/df41de35-4c6d-4313-8ccb-19dcead38269-kube-api-access-wdnvw\") pod \"heat-operator-controller-manager-5d889d78cf-fql2p\" (UID: \"df41de35-4c6d-4313-8ccb-19dcead38269\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710044 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pks5\" (UniqueName: \"kubernetes.io/projected/0e2748e0-ee66-45a1-b018-0798ad0ef293-kube-api-access-2pks5\") pod \"designate-operator-controller-manager-84f4f7b77b-kmgvf\" (UID: \"0e2748e0-ee66-45a1-b018-0798ad0ef293\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710067 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6swn\" (UniqueName: \"kubernetes.io/projected/68c5f3eb-52f4-4ede-ac89-f3a9aafe421b-kube-api-access-h6swn\") pod \"glance-operator-controller-manager-84958c4d49-9xr2s\" (UID: \"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710108 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcm4w\" (UniqueName: 
\"kubernetes.io/projected/5d20194a-c49a-4da1-a081-23d5c3bde845-kube-api-access-zcm4w\") pod \"cinder-operator-controller-manager-644bddb6d8-txbkn\" (UID: \"5d20194a-c49a-4da1-a081-23d5c3bde845\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710156 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk77x\" (UniqueName: \"kubernetes.io/projected/7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997-kube-api-access-mk77x\") pod \"barbican-operator-controller-manager-6ff8b75857-l2cw6\" (UID: \"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710180 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf2k7\" (UniqueName: \"kubernetes.io/projected/0b008477-9497-4cb1-9b44-c8c0dacbd0ae-kube-api-access-lf2k7\") pod \"horizon-operator-controller-manager-9f4696d94-8ndmd\" (UID: \"0b008477-9497-4cb1-9b44-c8c0dacbd0ae\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.710196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89jzf\" (UniqueName: \"kubernetes.io/projected/50a9326b-f577-4994-ba3a-28f1ffb1df6c-kube-api-access-89jzf\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.735377 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.751159 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk77x\" (UniqueName: \"kubernetes.io/projected/7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997-kube-api-access-mk77x\") pod \"barbican-operator-controller-manager-6ff8b75857-l2cw6\" (UID: \"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.751229 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-k26dh"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.752405 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.757679 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-8hczc" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.763365 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.764243 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.764815 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.768914 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-ntb5n" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.769162 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pks5\" (UniqueName: \"kubernetes.io/projected/0e2748e0-ee66-45a1-b018-0798ad0ef293-kube-api-access-2pks5\") pod \"designate-operator-controller-manager-84f4f7b77b-kmgvf\" (UID: \"0e2748e0-ee66-45a1-b018-0798ad0ef293\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.781956 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcm4w\" (UniqueName: \"kubernetes.io/projected/5d20194a-c49a-4da1-a081-23d5c3bde845-kube-api-access-zcm4w\") pod \"cinder-operator-controller-manager-644bddb6d8-txbkn\" (UID: \"5d20194a-c49a-4da1-a081-23d5c3bde845\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.784047 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-k26dh"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.787196 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.803851 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.804866 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.807707 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-x68wz" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.813946 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6swn\" (UniqueName: \"kubernetes.io/projected/68c5f3eb-52f4-4ede-ac89-f3a9aafe421b-kube-api-access-h6swn\") pod \"glance-operator-controller-manager-84958c4d49-9xr2s\" (UID: \"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814030 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814062 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2ngn\" (UniqueName: \"kubernetes.io/projected/36748ad5-2673-4d95-ada2-7ff95f740fa9-kube-api-access-z2ngn\") pod \"ironic-operator-controller-manager-7975b88857-46trm\" (UID: \"36748ad5-2673-4d95-ada2-7ff95f740fa9\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814085 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf2k7\" (UniqueName: \"kubernetes.io/projected/0b008477-9497-4cb1-9b44-c8c0dacbd0ae-kube-api-access-lf2k7\") pod \"horizon-operator-controller-manager-9f4696d94-8ndmd\" (UID: \"0b008477-9497-4cb1-9b44-c8c0dacbd0ae\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89jzf\" (UniqueName: \"kubernetes.io/projected/50a9326b-f577-4994-ba3a-28f1ffb1df6c-kube-api-access-89jzf\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814140 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdnvw\" (UniqueName: \"kubernetes.io/projected/df41de35-4c6d-4313-8ccb-19dcead38269-kube-api-access-wdnvw\") pod \"heat-operator-controller-manager-5d889d78cf-fql2p\" (UID: \"df41de35-4c6d-4313-8ccb-19dcead38269\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.814172 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvgc9\" (UniqueName: \"kubernetes.io/projected/65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1-kube-api-access-jvgc9\") pod \"keystone-operator-controller-manager-5bd55b4bff-svxsl\" (UID: \"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" Sep 29 19:22:20 crc kubenswrapper[4779]: 
I0929 19:22:20.814196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htlk8\" (UniqueName: \"kubernetes.io/projected/15635458-2ece-4c4b-a011-1c82d097bfdf-kube-api-access-htlk8\") pod \"manila-operator-controller-manager-6d68dbc695-6x9z9\" (UID: \"15635458-2ece-4c4b-a011-1c82d097bfdf\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:20 crc kubenswrapper[4779]: E0929 19:22:20.814634 4779 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 19:22:20 crc kubenswrapper[4779]: E0929 19:22:20.814694 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert podName:50a9326b-f577-4994-ba3a-28f1ffb1df6c nodeName:}" failed. No retries permitted until 2025-09-29 19:22:21.314660819 +0000 UTC m=+852.199085919 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert") pod "infra-operator-controller-manager-7d857cc749-7pf7d" (UID: "50a9326b-f577-4994-ba3a-28f1ffb1df6c") : secret "infra-operator-webhook-server-cert" not found Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.832999 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.833301 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.845741 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.846919 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.845863 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6swn\" (UniqueName: \"kubernetes.io/projected/68c5f3eb-52f4-4ede-ac89-f3a9aafe421b-kube-api-access-h6swn\") pod \"glance-operator-controller-manager-84958c4d49-9xr2s\" (UID: \"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.854597 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.855531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf2k7\" (UniqueName: \"kubernetes.io/projected/0b008477-9497-4cb1-9b44-c8c0dacbd0ae-kube-api-access-lf2k7\") pod \"horizon-operator-controller-manager-9f4696d94-8ndmd\" (UID: \"0b008477-9497-4cb1-9b44-c8c0dacbd0ae\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.864167 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdnvw\" (UniqueName: \"kubernetes.io/projected/df41de35-4c6d-4313-8ccb-19dcead38269-kube-api-access-wdnvw\") pod \"heat-operator-controller-manager-5d889d78cf-fql2p\" (UID: \"df41de35-4c6d-4313-8ccb-19dcead38269\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.866529 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.873543 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-cg258" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.877236 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.892718 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89jzf\" (UniqueName: \"kubernetes.io/projected/50a9326b-f577-4994-ba3a-28f1ffb1df6c-kube-api-access-89jzf\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.893033 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.925437 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn"] Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.925688 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpm7p\" (UniqueName: \"kubernetes.io/projected/9449cbcb-f74f-473e-9c0d-f1737b39c383-kube-api-access-qpm7p\") pod \"nova-operator-controller-manager-c7c776c96-94ldb\" (UID: \"9449cbcb-f74f-473e-9c0d-f1737b39c383\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.925796 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvgc9\" (UniqueName: \"kubernetes.io/projected/65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1-kube-api-access-jvgc9\") pod \"keystone-operator-controller-manager-5bd55b4bff-svxsl\" (UID: \"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.925933 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htlk8\" (UniqueName: \"kubernetes.io/projected/15635458-2ece-4c4b-a011-1c82d097bfdf-kube-api-access-htlk8\") pod \"manila-operator-controller-manager-6d68dbc695-6x9z9\" (UID: \"15635458-2ece-4c4b-a011-1c82d097bfdf\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.926069 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmd7f\" (UniqueName: \"kubernetes.io/projected/833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0-kube-api-access-qmd7f\") pod \"mariadb-operator-controller-manager-88c7-k26dh\" (UID: \"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.926175 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r59vr\" (UniqueName: \"kubernetes.io/projected/9ae9e131-70db-4bd1-8347-c5714c2b4754-kube-api-access-r59vr\") pod \"neutron-operator-controller-manager-64d7b59854-kvpnd\" (UID: \"9ae9e131-70db-4bd1-8347-c5714c2b4754\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.926304 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2ngn\" (UniqueName: \"kubernetes.io/projected/36748ad5-2673-4d95-ada2-7ff95f740fa9-kube-api-access-z2ngn\") pod \"ironic-operator-controller-manager-7975b88857-46trm\" (UID: \"36748ad5-2673-4d95-ada2-7ff95f740fa9\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.926708 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.929196 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.951939 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-75x6q" Sep 29 19:22:20 crc kubenswrapper[4779]: I0929 19:22:20.993043 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htlk8\" (UniqueName: \"kubernetes.io/projected/15635458-2ece-4c4b-a011-1c82d097bfdf-kube-api-access-htlk8\") pod \"manila-operator-controller-manager-6d68dbc695-6x9z9\" (UID: \"15635458-2ece-4c4b-a011-1c82d097bfdf\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.032063 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2ngn\" (UniqueName: \"kubernetes.io/projected/36748ad5-2673-4d95-ada2-7ff95f740fa9-kube-api-access-z2ngn\") pod \"ironic-operator-controller-manager-7975b88857-46trm\" (UID: \"36748ad5-2673-4d95-ada2-7ff95f740fa9\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.032839 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.035936 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.037101 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvgc9\" (UniqueName: \"kubernetes.io/projected/65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1-kube-api-access-jvgc9\") pod \"keystone-operator-controller-manager-5bd55b4bff-svxsl\" (UID: \"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.046734 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r59vr\" (UniqueName: \"kubernetes.io/projected/9ae9e131-70db-4bd1-8347-c5714c2b4754-kube-api-access-r59vr\") pod \"neutron-operator-controller-manager-64d7b59854-kvpnd\" (UID: \"9ae9e131-70db-4bd1-8347-c5714c2b4754\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.047094 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpw6f\" (UniqueName: \"kubernetes.io/projected/63554382-d024-4d43-b5c5-b31b80d47749-kube-api-access-jpw6f\") pod \"ovn-operator-controller-manager-9976ff44c-f7xwn\" (UID: \"63554382-d024-4d43-b5c5-b31b80d47749\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.047273 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpm7p\" (UniqueName: \"kubernetes.io/projected/9449cbcb-f74f-473e-9c0d-f1737b39c383-kube-api-access-qpm7p\") pod \"nova-operator-controller-manager-c7c776c96-94ldb\" (UID: \"9449cbcb-f74f-473e-9c0d-f1737b39c383\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.047366 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzh7q\" (UniqueName: \"kubernetes.io/projected/1c3a147f-0c72-4889-80aa-8b53a0c9ea3f-kube-api-access-gzh7q\") pod \"octavia-operator-controller-manager-76fcc6dc7c-mtqwc\" (UID: \"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.047441 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmd7f\" (UniqueName: \"kubernetes.io/projected/833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0-kube-api-access-qmd7f\") pod \"mariadb-operator-controller-manager-88c7-k26dh\" (UID: \"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.065862 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.068220 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-2b659" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.072834 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r59vr\" (UniqueName: \"kubernetes.io/projected/9ae9e131-70db-4bd1-8347-c5714c2b4754-kube-api-access-r59vr\") pod \"neutron-operator-controller-manager-64d7b59854-kvpnd\" (UID: \"9ae9e131-70db-4bd1-8347-c5714c2b4754\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.077402 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.081384 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.082969 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpm7p\" (UniqueName: \"kubernetes.io/projected/9449cbcb-f74f-473e-9c0d-f1737b39c383-kube-api-access-qpm7p\") pod \"nova-operator-controller-manager-c7c776c96-94ldb\" (UID: \"9449cbcb-f74f-473e-9c0d-f1737b39c383\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.105441 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmd7f\" (UniqueName: \"kubernetes.io/projected/833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0-kube-api-access-qmd7f\") pod \"mariadb-operator-controller-manager-88c7-k26dh\" (UID: \"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.124414 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.133275 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.148227 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.148270 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6xqd\" (UniqueName: \"kubernetes.io/projected/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-kube-api-access-x6xqd\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.148308 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzh7q\" (UniqueName: \"kubernetes.io/projected/1c3a147f-0c72-4889-80aa-8b53a0c9ea3f-kube-api-access-gzh7q\") pod \"octavia-operator-controller-manager-76fcc6dc7c-mtqwc\" (UID: \"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.148403 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpw6f\" (UniqueName: \"kubernetes.io/projected/63554382-d024-4d43-b5c5-b31b80d47749-kube-api-access-jpw6f\") pod \"ovn-operator-controller-manager-9976ff44c-f7xwn\" (UID: \"63554382-d024-4d43-b5c5-b31b80d47749\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.161353 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.162501 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.165960 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.169943 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.170030 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.176515 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-zc7kf" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.176811 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bprkk" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.180064 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpw6f\" (UniqueName: \"kubernetes.io/projected/63554382-d024-4d43-b5c5-b31b80d47749-kube-api-access-jpw6f\") pod \"ovn-operator-controller-manager-9976ff44c-f7xwn\" (UID: \"63554382-d024-4d43-b5c5-b31b80d47749\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.193581 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzh7q\" (UniqueName: \"kubernetes.io/projected/1c3a147f-0c72-4889-80aa-8b53a0c9ea3f-kube-api-access-gzh7q\") pod \"octavia-operator-controller-manager-76fcc6dc7c-mtqwc\" (UID: \"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.193652 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.199397 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.200379 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.203926 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-49zw4" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.210018 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.218452 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.219663 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.225094 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.226699 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-cvn9s" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.227392 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-crptq"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.228758 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.237377 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-pjqd7" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.240780 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-crptq"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.250203 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r86p6\" (UniqueName: \"kubernetes.io/projected/dfc872d3-d6c0-42af-9ab7-7695257d969f-kube-api-access-r86p6\") pod \"placement-operator-controller-manager-589c58c6c-8fjvn\" (UID: \"dfc872d3-d6c0-42af-9ab7-7695257d969f\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.250260 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6hns\" (UniqueName: \"kubernetes.io/projected/38ef5cba-94db-4e0d-b2ad-290293848c65-kube-api-access-m6hns\") pod \"swift-operator-controller-manager-bc7dc7bd9-9jgwn\" (UID: \"38ef5cba-94db-4e0d-b2ad-290293848c65\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.250310 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.250344 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6xqd\" (UniqueName: \"kubernetes.io/projected/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-kube-api-access-x6xqd\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: E0929 19:22:21.250688 4779 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:22:21 crc kubenswrapper[4779]: E0929 19:22:21.250802 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert podName:49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4 nodeName:}" failed. No retries permitted until 2025-09-29 19:22:21.750777091 +0000 UTC m=+852.635202191 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-54f6z" (UID: "49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.264737 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.268459 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.271726 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.275727 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.275999 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-6l5vl" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.277064 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.282930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6xqd\" (UniqueName: \"kubernetes.io/projected/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-kube-api-access-x6xqd\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.304731 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.305704 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.312434 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.315148 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-jhkhg" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.331469 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.349814 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2"] Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355003 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhbwl\" (UniqueName: \"kubernetes.io/projected/cdc62734-f794-43fc-9af8-752098cdf316-kube-api-access-vhbwl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-tflz6\" (UID: \"cdc62734-f794-43fc-9af8-752098cdf316\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355125 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r86p6\" (UniqueName: \"kubernetes.io/projected/dfc872d3-d6c0-42af-9ab7-7695257d969f-kube-api-access-r86p6\") pod \"placement-operator-controller-manager-589c58c6c-8fjvn\" (UID: \"dfc872d3-d6c0-42af-9ab7-7695257d969f\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355150 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355171 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6hns\" (UniqueName: \"kubernetes.io/projected/38ef5cba-94db-4e0d-b2ad-290293848c65-kube-api-access-m6hns\") pod \"swift-operator-controller-manager-bc7dc7bd9-9jgwn\" (UID: \"38ef5cba-94db-4e0d-b2ad-290293848c65\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c04c1cb-ecb2-42a0-82e6-3c2842508041-cert\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355224 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5psr\" (UniqueName: \"kubernetes.io/projected/a15b1202-c010-40ae-be51-75fbb766fba0-kube-api-access-w5psr\") pod \"watcher-operator-controller-manager-76669f99c-crptq\" (UID: \"a15b1202-c010-40ae-be51-75fbb766fba0\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.355256 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bclrn\" (UniqueName: \"kubernetes.io/projected/13740318-83f1-4384-9b4c-b8de793773d3-kube-api-access-bclrn\") pod \"test-operator-controller-manager-f66b554c6-dlgq2\" (UID: \"13740318-83f1-4384-9b4c-b8de793773d3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 
19:22:21.355297 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9qw4\" (UniqueName: \"kubernetes.io/projected/3c04c1cb-ecb2-42a0-82e6-3c2842508041-kube-api-access-l9qw4\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.363645 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.364290 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a9326b-f577-4994-ba3a-28f1ffb1df6c-cert\") pod \"infra-operator-controller-manager-7d857cc749-7pf7d\" (UID: \"50a9326b-f577-4994-ba3a-28f1ffb1df6c\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.398897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r86p6\" (UniqueName: \"kubernetes.io/projected/dfc872d3-d6c0-42af-9ab7-7695257d969f-kube-api-access-r86p6\") pod \"placement-operator-controller-manager-589c58c6c-8fjvn\" (UID: \"dfc872d3-d6c0-42af-9ab7-7695257d969f\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.398962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6hns\" (UniqueName: \"kubernetes.io/projected/38ef5cba-94db-4e0d-b2ad-290293848c65-kube-api-access-m6hns\") pod \"swift-operator-controller-manager-bc7dc7bd9-9jgwn\" (UID: \"38ef5cba-94db-4e0d-b2ad-290293848c65\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.456905 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcrzb\" (UniqueName: \"kubernetes.io/projected/5c9afdac-c252-4cd4-afb1-9d7fb43d86e1-kube-api-access-xcrzb\") pod \"rabbitmq-cluster-operator-manager-79d8469568-wl7d2\" (UID: \"5c9afdac-c252-4cd4-afb1-9d7fb43d86e1\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.457009 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c04c1cb-ecb2-42a0-82e6-3c2842508041-cert\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.457035 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5psr\" (UniqueName: \"kubernetes.io/projected/a15b1202-c010-40ae-be51-75fbb766fba0-kube-api-access-w5psr\") pod \"watcher-operator-controller-manager-76669f99c-crptq\" (UID: \"a15b1202-c010-40ae-be51-75fbb766fba0\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.457071 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bclrn\" (UniqueName: 
\"kubernetes.io/projected/13740318-83f1-4384-9b4c-b8de793773d3-kube-api-access-bclrn\") pod \"test-operator-controller-manager-f66b554c6-dlgq2\" (UID: \"13740318-83f1-4384-9b4c-b8de793773d3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.457125 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9qw4\" (UniqueName: \"kubernetes.io/projected/3c04c1cb-ecb2-42a0-82e6-3c2842508041-kube-api-access-l9qw4\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.457175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhbwl\" (UniqueName: \"kubernetes.io/projected/cdc62734-f794-43fc-9af8-752098cdf316-kube-api-access-vhbwl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-tflz6\" (UID: \"cdc62734-f794-43fc-9af8-752098cdf316\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.460838 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.464623 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c04c1cb-ecb2-42a0-82e6-3c2842508041-cert\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.476969 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.480257 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhbwl\" (UniqueName: \"kubernetes.io/projected/cdc62734-f794-43fc-9af8-752098cdf316-kube-api-access-vhbwl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-tflz6\" (UID: \"cdc62734-f794-43fc-9af8-752098cdf316\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.488980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bclrn\" (UniqueName: \"kubernetes.io/projected/13740318-83f1-4384-9b4c-b8de793773d3-kube-api-access-bclrn\") pod \"test-operator-controller-manager-f66b554c6-dlgq2\" (UID: \"13740318-83f1-4384-9b4c-b8de793773d3\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.489519 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9qw4\" (UniqueName: \"kubernetes.io/projected/3c04c1cb-ecb2-42a0-82e6-3c2842508041-kube-api-access-l9qw4\") pod \"openstack-operator-controller-manager-f846cdb6-8tk8p\" (UID: \"3c04c1cb-ecb2-42a0-82e6-3c2842508041\") " pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.489995 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5psr\" (UniqueName: \"kubernetes.io/projected/a15b1202-c010-40ae-be51-75fbb766fba0-kube-api-access-w5psr\") pod \"watcher-operator-controller-manager-76669f99c-crptq\" (UID: \"a15b1202-c010-40ae-be51-75fbb766fba0\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.559003 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.591751 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcrzb\" (UniqueName: \"kubernetes.io/projected/5c9afdac-c252-4cd4-afb1-9d7fb43d86e1-kube-api-access-xcrzb\") pod \"rabbitmq-cluster-operator-manager-79d8469568-wl7d2\" (UID: \"5c9afdac-c252-4cd4-afb1-9d7fb43d86e1\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.610706 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.613740 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcrzb\" (UniqueName: \"kubernetes.io/projected/5c9afdac-c252-4cd4-afb1-9d7fb43d86e1-kube-api-access-xcrzb\") pod \"rabbitmq-cluster-operator-manager-79d8469568-wl7d2\" (UID: \"5c9afdac-c252-4cd4-afb1-9d7fb43d86e1\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.619868 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.630028 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.638956 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.647713 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.662806 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.670859 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.794267 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.800182 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-54f6z\" (UID: \"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:21 crc kubenswrapper[4779]: I0929 19:22:21.802892 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.049033 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.059583 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6"] Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.064046 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a72bb32_1401_4fb3_a8b5_e2c9d3c7e997.slice/crio-77189395f03521f041bf3612f8438066baae39c89b4f40b1165f347ecd6fd9de WatchSource:0}: Error finding container 77189395f03521f041bf3612f8438066baae39c89b4f40b1165f347ecd6fd9de: Status 404 returned error can't find the container with id 77189395f03521f041bf3612f8438066baae39c89b4f40b1165f347ecd6fd9de Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.211202 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" event={"ID":"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997","Type":"ContainerStarted","Data":"77189395f03521f041bf3612f8438066baae39c89b4f40b1165f347ecd6fd9de"} Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.212454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" event={"ID":"5d20194a-c49a-4da1-a081-23d5c3bde845","Type":"ContainerStarted","Data":"0758fe5c80b93f5f107f0842507683f7aa9388c58bf408b2e58677dee52937aa"} Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.416059 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.424598 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.434212 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.458958 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9"] Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.467086 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15635458_2ece_4c4b_a011_1c82d097bfdf.slice/crio-7f93fc7ae6f956c10fbe8adaed7f8806fcf1e4514859dd57b7f1ef9539788be4 WatchSource:0}: Error finding container 7f93fc7ae6f956c10fbe8adaed7f8806fcf1e4514859dd57b7f1ef9539788be4: Status 404 returned error can't find the container with id 7f93fc7ae6f956c10fbe8adaed7f8806fcf1e4514859dd57b7f1ef9539788be4 Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.468159 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.473166 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.741844 4779 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.760865 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-46trm"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.770055 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.779725 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-crptq"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.789445 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-k26dh"] Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.792636 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50a9326b_f577_4994_ba3a_28f1ffb1df6c.slice/crio-4aa8d1f2ea65c23bb96ba72c60288c84f1b1c1031267e82467d348283aeeb753 WatchSource:0}: Error finding container 4aa8d1f2ea65c23bb96ba72c60288c84f1b1c1031267e82467d348283aeeb753: Status 404 returned error can't find the container with id 4aa8d1f2ea65c23bb96ba72c60288c84f1b1c1031267e82467d348283aeeb753 Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.797276 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d"] Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.799107 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36748ad5_2673_4d95_ada2_7ff95f740fa9.slice/crio-ecb87ae891a40aeff043c6a8ce4ffb08e2d136043e1b79955d550248d5c79432 WatchSource:0}: Error finding container ecb87ae891a40aeff043c6a8ce4ffb08e2d136043e1b79955d550248d5c79432: Status 404 returned error can't find the container with id ecb87ae891a40aeff043c6a8ce4ffb08e2d136043e1b79955d550248d5c79432 Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.803761 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod833ffe0d_b2b2_4fd5_8094_ad9fe58f60c0.slice/crio-34a73724b147a89b5723ad4fa01976ee3040c531979b82d000c685a134d2ba96 WatchSource:0}: Error finding container 34a73724b147a89b5723ad4fa01976ee3040c531979b82d000c685a134d2ba96: Status 404 returned error can't find the container with id 34a73724b147a89b5723ad4fa01976ee3040c531979b82d000c685a134d2ba96 Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.817002 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfc872d3_d6c0_42af_9ab7_7695257d969f.slice/crio-3991f5f67edf3229d133a2e7a5c7f394f0e181091dca18c0951e0e8899a52eef WatchSource:0}: Error finding container 3991f5f67edf3229d133a2e7a5c7f394f0e181091dca18c0951e0e8899a52eef: Status 404 returned error can't find the container with id 3991f5f67edf3229d133a2e7a5c7f394f0e181091dca18c0951e0e8899a52eef Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.822016 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.833924 4779 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn"] Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.833967 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn"] Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.840107 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jpw6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-f7xwn_openstack-operators(63554382-d024-4d43-b5c5-b31b80d47749): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.840244 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r86p6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-8fjvn_openstack-operators(dfc872d3-d6c0-42af-9ab7-7695257d969f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.840506 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc"] Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.844293 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gzh7q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-mtqwc_openstack-operators(1c3a147f-0c72-4889-80aa-8b53a0c9ea3f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.844526 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wdnvw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5d889d78cf-fql2p_openstack-operators(df41de35-4c6d-4313-8ccb-19dcead38269): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.844937 4779 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m6hns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-9jgwn_openstack-operators(38ef5cba-94db-4e0d-b2ad-290293848c65): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.846359 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6"] Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.846958 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vhbwl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-tflz6_openstack-operators(cdc62734-f794-43fc-9af8-752098cdf316): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: W0929 19:22:22.909106 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c9afdac_c252_4cd4_afb1_9d7fb43d86e1.slice/crio-fd41a2e47fe642dbd8c6eda76689d0e9d7a97bb27e7a0837b56ff8ca4afaca3a WatchSource:0}: Error finding container fd41a2e47fe642dbd8c6eda76689d0e9d7a97bb27e7a0837b56ff8ca4afaca3a: Status 404 returned error can't find the container with id fd41a2e47fe642dbd8c6eda76689d0e9d7a97bb27e7a0837b56ff8ca4afaca3a Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.916768 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2"] Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.921128 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Val
ue:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.
io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/o
penstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6xqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6d776955-54f6z_openstack-operators(49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.922575 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xcrzb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-wl7d2_openstack-operators(5c9afdac-c252-4cd4-afb1-9d7fb43d86e1): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.924045 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" podUID="5c9afdac-c252-4cd4-afb1-9d7fb43d86e1"
Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.928913 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2"]
Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.941512 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p"]
Sep 29 19:22:22 crc kubenswrapper[4779]: I0929 19:22:22.944196 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z"]
Sep 29 19:22:22 crc kubenswrapper[4779]: E0929 19:22:22.995707 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" podUID="63554382-d024-4d43-b5c5-b31b80d47749"
Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.019807 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" podUID="dfc872d3-d6c0-42af-9ab7-7695257d969f"
Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.134743 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" podUID="38ef5cba-94db-4e0d-b2ad-290293848c65"
Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.151849 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podUID="df41de35-4c6d-4313-8ccb-19dcead38269"
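[editor's note — annotation, not part of the original log] The burst of "ErrImagePull: pull QPS exceeded" records above comes from the kubelet's client-side image-pull rate limiter, not from the registry: with some twenty operator pods landing on this single CRC node at once, the pull requests exceed the limiter and the affected pods fall through to back-off. The limit is set by the KubeletConfiguration fields registryPullQPS (default 5) and registryBurst (default 10). A minimal sketch that renders a config raising both limits, assuming the k8s.io/kubelet/config/v1beta1 and sigs.k8s.io/yaml modules; the values 10 and 20 are illustrative, not taken from this cluster:

    package main

    import (
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        kubeletv1beta1 "k8s.io/kubelet/config/v1beta1"
        "sigs.k8s.io/yaml"
    )

    func main() {
        // registryPullQPS is a *int32 in the v1beta1 API; 0 disables the limit.
        qps := int32(10)
        cfg := kubeletv1beta1.KubeletConfiguration{
            TypeMeta: metav1.TypeMeta{
                APIVersion: "kubelet.config.k8s.io/v1beta1",
                Kind:       "KubeletConfiguration",
            },
            RegistryPullQPS: &qps, // default 5 pulls/sec
            RegistryBurst:   20,   // default 10
        }
        out, err := yaml.Marshal(cfg)
        if err != nil {
            panic(err)
        }
        // Merge the rendered YAML into the node's kubelet config and restart
        // the kubelet for it to take effect.
        fmt.Print(string(out))
    }

Setting registryPullQPS to 0 removes the limit entirely, which is a common choice on throwaway CI nodes like this one.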
pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podUID="1c3a147f-0c72-4889-80aa-8b53a0c9ea3f" Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.157521 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podUID="cdc62734-f794-43fc-9af8-752098cdf316" Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.220934 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" podUID="49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.240472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" event={"ID":"38ef5cba-94db-4e0d-b2ad-290293848c65","Type":"ContainerStarted","Data":"4e2c266866f5bd5e8be9d8c36691367e9045efeb0ff3b474cc21cb71c7746899"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.240517 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" event={"ID":"38ef5cba-94db-4e0d-b2ad-290293848c65","Type":"ContainerStarted","Data":"a3a7013200631105a590c5346c9d54d7ee926201c56735b0f9d53d5316cbce2c"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.242349 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" podUID="38ef5cba-94db-4e0d-b2ad-290293848c65" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.246745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" event={"ID":"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4","Type":"ContainerStarted","Data":"cd7267eeb4aa1b9d6b641e96b35b4ecb6632585596e28e6c5559bf6778c1c0f3"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.246780 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" event={"ID":"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4","Type":"ContainerStarted","Data":"458dcad3fa052552f11beb3b38c995e2f6be4caa1da9d61a4bff4b6933195517"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.248611 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" podUID="49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.249938 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" event={"ID":"3c04c1cb-ecb2-42a0-82e6-3c2842508041","Type":"ContainerStarted","Data":"9b01834996a8dd198e0d9a560abb23ffcb72b415dbad6bcd1c47688ca6445000"} Sep 29 19:22:23 crc 
kubenswrapper[4779]: I0929 19:22:23.249970 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" event={"ID":"3c04c1cb-ecb2-42a0-82e6-3c2842508041","Type":"ContainerStarted","Data":"c1ef71f4cd3be88870866437361c57500d0554861a9c795d3516dc31e2fb3b99"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.263003 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" event={"ID":"cdc62734-f794-43fc-9af8-752098cdf316","Type":"ContainerStarted","Data":"4c6b01b78249be4014bf23da7a6913dc9d03835c40353186a492dc6c81380375"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.263041 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" event={"ID":"cdc62734-f794-43fc-9af8-752098cdf316","Type":"ContainerStarted","Data":"0307599f1ef6bdc11b7dcbaa761f4f74e3dfbb188301665cf1a7b485aef687f7"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.270050 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podUID="cdc62734-f794-43fc-9af8-752098cdf316" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.276513 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" event={"ID":"9ae9e131-70db-4bd1-8347-c5714c2b4754","Type":"ContainerStarted","Data":"fb032ce9e5920dd3de77d6a2f82c4bda259e0178834dbd03f966f0c7f7b12513"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.277890 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" event={"ID":"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1","Type":"ContainerStarted","Data":"033700ca5309471581d2f8d31557f120d3df75c131d9b496ddf2e85ef377837f"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.279148 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" event={"ID":"13740318-83f1-4384-9b4c-b8de793773d3","Type":"ContainerStarted","Data":"2e34bd39f45a060fdb079e3354d60f8e9b9ee43eb71b2a94a56b688fd420b99c"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.282036 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" event={"ID":"9449cbcb-f74f-473e-9c0d-f1737b39c383","Type":"ContainerStarted","Data":"e1159b9e4401cf4bada992b5067520c3ca26afb3a8cf2fa26060733d1265e6f1"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.316582 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" event={"ID":"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f","Type":"ContainerStarted","Data":"64edb868e6aab6d501991095f27309dcea0777ca8d6a08162cc4084c1737ab23"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.316635 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" 
event={"ID":"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f","Type":"ContainerStarted","Data":"881c0bb084fe3acaf6aa513a6126d3dbef61bf1a1e6251df8b450dba8bd37b16"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.321452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podUID="1c3a147f-0c72-4889-80aa-8b53a0c9ea3f" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.328842 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" event={"ID":"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0","Type":"ContainerStarted","Data":"34a73724b147a89b5723ad4fa01976ee3040c531979b82d000c685a134d2ba96"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.330982 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" event={"ID":"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b","Type":"ContainerStarted","Data":"9e17bfe3aa27f866d4c296d086fa759af4e134cb99438839b7777a5b8b040dc9"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.333463 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" event={"ID":"0e2748e0-ee66-45a1-b018-0798ad0ef293","Type":"ContainerStarted","Data":"83599fd18804063acf33af3db24a8d12d79b7c38c5bb75c33544b350f55263ea"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.335265 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" event={"ID":"50a9326b-f577-4994-ba3a-28f1ffb1df6c","Type":"ContainerStarted","Data":"4aa8d1f2ea65c23bb96ba72c60288c84f1b1c1031267e82467d348283aeeb753"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.336661 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" event={"ID":"5c9afdac-c252-4cd4-afb1-9d7fb43d86e1","Type":"ContainerStarted","Data":"fd41a2e47fe642dbd8c6eda76689d0e9d7a97bb27e7a0837b56ff8ca4afaca3a"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.337969 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" podUID="5c9afdac-c252-4cd4-afb1-9d7fb43d86e1" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.352850 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" event={"ID":"15635458-2ece-4c4b-a011-1c82d097bfdf","Type":"ContainerStarted","Data":"7f93fc7ae6f956c10fbe8adaed7f8806fcf1e4514859dd57b7f1ef9539788be4"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.375424 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" event={"ID":"a15b1202-c010-40ae-be51-75fbb766fba0","Type":"ContainerStarted","Data":"e885040b79b31b33b1bbe688bad6d10954772481dabbe2f9514ca643e8dfcdab"} Sep 29 
19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.376617 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" event={"ID":"63554382-d024-4d43-b5c5-b31b80d47749","Type":"ContainerStarted","Data":"049c1c40762f1c6fba9bb46ee40c344ae10d73276a95c02c3d27b9ff4cbca392"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.376656 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" event={"ID":"63554382-d024-4d43-b5c5-b31b80d47749","Type":"ContainerStarted","Data":"c2e94593294708b2be84bb107e38d23798643f4d8df47691c27e1cf0f54251c3"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.379299 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" podUID="63554382-d024-4d43-b5c5-b31b80d47749" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.401230 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" event={"ID":"dfc872d3-d6c0-42af-9ab7-7695257d969f","Type":"ContainerStarted","Data":"df44216b149e39b7e5e3b9926f5e8cae64421c783e2945cc93c613b0f84b1cda"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.401280 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" event={"ID":"dfc872d3-d6c0-42af-9ab7-7695257d969f","Type":"ContainerStarted","Data":"3991f5f67edf3229d133a2e7a5c7f394f0e181091dca18c0951e0e8899a52eef"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.405773 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" podUID="dfc872d3-d6c0-42af-9ab7-7695257d969f" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.408402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" event={"ID":"36748ad5-2673-4d95-ada2-7ff95f740fa9","Type":"ContainerStarted","Data":"ecb87ae891a40aeff043c6a8ce4ffb08e2d136043e1b79955d550248d5c79432"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.426427 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" event={"ID":"df41de35-4c6d-4313-8ccb-19dcead38269","Type":"ContainerStarted","Data":"3f14c11167e1b9ebc79faeb42de4838333d40096a4f7687c26ab904eb1a55e8c"} Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.426472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" event={"ID":"df41de35-4c6d-4313-8ccb-19dcead38269","Type":"ContainerStarted","Data":"c9f70b88e9c2df883710f5b02e7f9f472321c1cbf879835310eb4ff44cef8acf"} Sep 29 19:22:23 crc kubenswrapper[4779]: E0929 19:22:23.427416 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podUID="df41de35-4c6d-4313-8ccb-19dcead38269" Sep 29 19:22:23 crc kubenswrapper[4779]: I0929 19:22:23.427640 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" event={"ID":"0b008477-9497-4cb1-9b44-c8c0dacbd0ae","Type":"ContainerStarted","Data":"25f9a70b757dc4d6449af12578a140774280ee611ebc60d128f6c7e9a8fc2279"} Sep 29 19:22:24 crc kubenswrapper[4779]: I0929 19:22:24.441893 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" event={"ID":"3c04c1cb-ecb2-42a0-82e6-3c2842508041","Type":"ContainerStarted","Data":"a4faecc8bc6a0a76730221a1a40b788fb17f44ed31884b26c001502f467e136e"} Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.448855 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podUID="1c3a147f-0c72-4889-80aa-8b53a0c9ea3f" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.448886 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" podUID="38ef5cba-94db-4e0d-b2ad-290293848c65" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.449698 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" podUID="49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.450052 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" podUID="63554382-d024-4d43-b5c5-b31b80d47749" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.450059 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" podUID="dfc872d3-d6c0-42af-9ab7-7695257d969f" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.450094 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podUID="cdc62734-f794-43fc-9af8-752098cdf316" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.450102 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" podUID="5c9afdac-c252-4cd4-afb1-9d7fb43d86e1" Sep 29 19:22:24 crc kubenswrapper[4779]: E0929 19:22:24.464492 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podUID="df41de35-4c6d-4313-8ccb-19dcead38269" Sep 29 19:22:24 crc kubenswrapper[4779]: I0929 19:22:24.596530 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" podStartSLOduration=3.5965114099999997 podStartE2EDuration="3.59651141s" podCreationTimestamp="2025-09-29 19:22:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:22:24.586825586 +0000 UTC m=+855.471250696" watchObservedRunningTime="2025-09-29 19:22:24.59651141 +0000 UTC m=+855.480936510" Sep 29 19:22:25 crc kubenswrapper[4779]: I0929 19:22:25.447463 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.336929 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"] Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.344036 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.375647 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"] Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.397145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.397336 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br47j\" (UniqueName: \"kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.397375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.499500 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.499670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.499699 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br47j\" (UniqueName: \"kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.500496 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.501013 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.519093 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-br47j\" (UniqueName: \"kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j\") pod \"redhat-operators-tmdsl\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") " pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:28 crc kubenswrapper[4779]: I0929 19:22:28.685946 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tmdsl" Sep 29 19:22:31 crc kubenswrapper[4779]: I0929 19:22:31.669934 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-f846cdb6-8tk8p" Sep 29 19:22:32 crc kubenswrapper[4779]: I0929 19:22:32.511371 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" event={"ID":"a15b1202-c010-40ae-be51-75fbb766fba0","Type":"ContainerStarted","Data":"0344177c9140938790d0eeb3ce93d90476fec4ec437b78336dbc5c1b21650abc"} Sep 29 19:22:32 crc kubenswrapper[4779]: I0929 19:22:32.517995 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" event={"ID":"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b","Type":"ContainerStarted","Data":"18724e7776cf5067801d5e79166feb7436b03eb5ea92683be88ae4727d10777f"} Sep 29 19:22:32 crc kubenswrapper[4779]: I0929 19:22:32.548207 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"] Sep 29 19:22:32 crc kubenswrapper[4779]: W0929 19:22:32.613497 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cf2ee22_29b0_4662_a23b_c02ef8ddb9dc.slice/crio-957152fa9f347121421ab88a97d2b427c14457f73ab869cdfe2b2a367e50aa35 WatchSource:0}: Error finding container 957152fa9f347121421ab88a97d2b427c14457f73ab869cdfe2b2a367e50aa35: Status 404 returned error can't find the container with id 957152fa9f347121421ab88a97d2b427c14457f73ab869cdfe2b2a367e50aa35 Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.526537 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" event={"ID":"13740318-83f1-4384-9b4c-b8de793773d3","Type":"ContainerStarted","Data":"a9bb919a783140b19b51ca5ff13746f226b584fea4bef92b827f62b11021923f"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.528232 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" event={"ID":"50a9326b-f577-4994-ba3a-28f1ffb1df6c","Type":"ContainerStarted","Data":"24cf00d5acc22a81b717d9db4be5622c2827034bddf560ba1bd4b274e502490e"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.530145 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" event={"ID":"15635458-2ece-4c4b-a011-1c82d097bfdf","Type":"ContainerStarted","Data":"37a72d5c2b26ffb0c17a2a22822ae068069c41734587ca0903034c5999814e81"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.530201 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" event={"ID":"15635458-2ece-4c4b-a011-1c82d097bfdf","Type":"ContainerStarted","Data":"ed0e57ba8ce5371f5accf4cf92b94671c93535f464c0d70fdd1770d4a84514c1"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.530259 4779 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.531771 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" event={"ID":"a15b1202-c010-40ae-be51-75fbb766fba0","Type":"ContainerStarted","Data":"3b4427f691656007955975aa3f0f1aae453221ed194b44e1d889e679879778d7"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.532207 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.532823 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" event={"ID":"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997","Type":"ContainerStarted","Data":"de887b4619634645a494b5f26bfd15f66c6bfdc742fcf5d8b175a0d2f270111e"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.532845 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" event={"ID":"7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997","Type":"ContainerStarted","Data":"161dd5324020c7e01c535046622c8286e8b53e51636f37fb75d2e196b3a35750"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.533167 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.538241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" event={"ID":"36748ad5-2673-4d95-ada2-7ff95f740fa9","Type":"ContainerStarted","Data":"efee57dc8e9bd46d5bc0b2517393753e722241663133273fc0557bdabd67eb72"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.540136 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" event={"ID":"0e2748e0-ee66-45a1-b018-0798ad0ef293","Type":"ContainerStarted","Data":"cd37a2c5bddac58ebbb1ff46322e99d7cb78347d4ab8c6dea8e71eade11636fd"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.544907 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" event={"ID":"9449cbcb-f74f-473e-9c0d-f1737b39c383","Type":"ContainerStarted","Data":"fb831db776bd90848ad5e94d7a4e25e7a805b3699b893024f426a8f9c8e44701"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.544938 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" event={"ID":"9449cbcb-f74f-473e-9c0d-f1737b39c383","Type":"ContainerStarted","Data":"c5fce975bbf3b7f0271c6fabd878a3f0cf837d6b61c67bf0924dc7329415c1e1"} Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.545552 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.548248 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" event={"ID":"0b008477-9497-4cb1-9b44-c8c0dacbd0ae","Type":"ContainerStarted","Data":"954d4623ea3b46ae64b50424ba5d97d9497e522e8100cfd6c2359055b0edf5a3"} Sep 29 19:22:33 crc 
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.548393 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.556390 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9" podStartSLOduration=3.9295458070000002 podStartE2EDuration="13.556374469s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.472094307 +0000 UTC m=+853.356519407" lastFinishedPulling="2025-09-29 19:22:32.098922969 +0000 UTC m=+862.983348069" observedRunningTime="2025-09-29 19:22:33.548054252 +0000 UTC m=+864.432479352" watchObservedRunningTime="2025-09-29 19:22:33.556374469 +0000 UTC m=+864.440799569"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.558750 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" event={"ID":"68c5f3eb-52f4-4ede-ac89-f3a9aafe421b","Type":"ContainerStarted","Data":"96ad0cb3293634b9c749f937cec5ce94046923a7071473e4c096fabe159c9951"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.559357 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.561909 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" event={"ID":"5d20194a-c49a-4da1-a081-23d5c3bde845","Type":"ContainerStarted","Data":"dbf84d0b265f27d6178e9f5aae81880163c16859799dbce42806a983c80fed92"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.561936 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" event={"ID":"5d20194a-c49a-4da1-a081-23d5c3bde845","Type":"ContainerStarted","Data":"0ac6dd15fca5da38a89044c66043f2a6a4cd4b2026cd12a6484636642abed9f6"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.562073 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.565996 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" event={"ID":"9ae9e131-70db-4bd1-8347-c5714c2b4754","Type":"ContainerStarted","Data":"b566ab383c27b76de6f202896bd3be1d1238c7a6ec30eef7e80074b00bf7f3a1"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.567390 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" event={"ID":"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1","Type":"ContainerStarted","Data":"31b59edabad05d0f1f7faef30879693657fef1f95f9e5b1ec44f4b6e83369ae9"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.567420 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" event={"ID":"65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1","Type":"ContainerStarted","Data":"02cb696d856f4e3b88fc1026caaa92955858e9c0e29de2745118c978b84f15ba"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.568013 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.571263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" event={"ID":"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0","Type":"ContainerStarted","Data":"f202785c3fe4e95fec589a8965f3056dc7895b5ae27abeab306f3b3d4dcf9ea2"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.571657 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.573263 4779 generic.go:334] "Generic (PLEG): container finished" podID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerID="250e53a92e8f796144c2edcc62aee9c920b738634490d79273fafc852dce095a" exitCode=0
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.573299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerDied","Data":"250e53a92e8f796144c2edcc62aee9c920b738634490d79273fafc852dce095a"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.573331 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerStarted","Data":"957152fa9f347121421ab88a97d2b427c14457f73ab869cdfe2b2a367e50aa35"}
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.586350 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq" podStartSLOduration=3.280484549 podStartE2EDuration="12.586334035s" podCreationTimestamp="2025-09-29 19:22:21 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.793552416 +0000 UTC m=+853.677977516" lastFinishedPulling="2025-09-29 19:22:32.099401892 +0000 UTC m=+862.983827002" observedRunningTime="2025-09-29 19:22:33.581623397 +0000 UTC m=+864.466048497" watchObservedRunningTime="2025-09-29 19:22:33.586334035 +0000 UTC m=+864.470759135"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.610560 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6" podStartSLOduration=3.580795504 podStartE2EDuration="13.610544825s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.066254819 +0000 UTC m=+852.950679919" lastFinishedPulling="2025-09-29 19:22:32.09600414 +0000 UTC m=+862.980429240" observedRunningTime="2025-09-29 19:22:33.607969355 +0000 UTC m=+864.492394455" watchObservedRunningTime="2025-09-29 19:22:33.610544825 +0000 UTC m=+864.494969925"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.630309 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb" podStartSLOduration=3.977826113 podStartE2EDuration="13.630293913s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.472520769 +0000 UTC m=+853.356945869" lastFinishedPulling="2025-09-29 19:22:32.124988569 +0000 UTC m=+863.009413669" observedRunningTime="2025-09-29 19:22:33.627366423 +0000 UTC m=+864.511791523" watchObservedRunningTime="2025-09-29 19:22:33.630293913 +0000 UTC m=+864.514719003"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.647132 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl" podStartSLOduration=3.980801923 podStartE2EDuration="13.647114281s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.431069349 +0000 UTC m=+853.315494469" lastFinishedPulling="2025-09-29 19:22:32.097381727 +0000 UTC m=+862.981806827" observedRunningTime="2025-09-29 19:22:33.644331546 +0000 UTC m=+864.528756646" watchObservedRunningTime="2025-09-29 19:22:33.647114281 +0000 UTC m=+864.531539381"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.675206 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn" podStartSLOduration=3.6199089300000002 podStartE2EDuration="13.675191556s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.063847024 +0000 UTC m=+852.948272124" lastFinishedPulling="2025-09-29 19:22:32.11912965 +0000 UTC m=+863.003554750" observedRunningTime="2025-09-29 19:22:33.671925777 +0000 UTC m=+864.556350877" watchObservedRunningTime="2025-09-29 19:22:33.675191556 +0000 UTC m=+864.559616656"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.726493 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s" podStartSLOduration=4.102075658 podStartE2EDuration="13.726478594s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.472419736 +0000 UTC m=+853.356844826" lastFinishedPulling="2025-09-29 19:22:32.096822642 +0000 UTC m=+862.981247762" observedRunningTime="2025-09-29 19:22:33.724838179 +0000 UTC m=+864.609263279" watchObservedRunningTime="2025-09-29 19:22:33.726478594 +0000 UTC m=+864.610903694"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.765089 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" podStartSLOduration=4.466222169 podStartE2EDuration="13.765073635s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.820205702 +0000 UTC m=+853.704630802" lastFinishedPulling="2025-09-29 19:22:32.119057148 +0000 UTC m=+863.003482268" observedRunningTime="2025-09-29 19:22:33.762124715 +0000 UTC m=+864.646549815" watchObservedRunningTime="2025-09-29 19:22:33.765073635 +0000 UTC m=+864.649498735"
Sep 29 19:22:33 crc kubenswrapper[4779]: I0929 19:22:33.792276 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd" podStartSLOduration=4.169737972 podStartE2EDuration="13.792260616s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.475634014 +0000 UTC m=+853.360059124" lastFinishedPulling="2025-09-29 19:22:32.098156668 +0000 UTC m=+862.982581768" observedRunningTime="2025-09-29 19:22:33.789299435 +0000 UTC m=+864.673724545" watchObservedRunningTime="2025-09-29 19:22:33.792260616 +0000 UTC m=+864.676685716"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.582900 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerStarted","Data":"6fcc1620c1df9ef343de40bbdbe60d6ddd030375dbaa6a0731c358c501e01722"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.585232 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" event={"ID":"9ae9e131-70db-4bd1-8347-c5714c2b4754","Type":"ContainerStarted","Data":"c867e2f748c74e36df7cfd27b32a29dc21a76327f3a27312525e726121af2865"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.585288 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.587010 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" event={"ID":"36748ad5-2673-4d95-ada2-7ff95f740fa9","Type":"ContainerStarted","Data":"9603b39018e74899b7b7d5a951a6f3541789ff1c43290cafd5f9c138194ccb20"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.587366 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.590147 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" event={"ID":"0e2748e0-ee66-45a1-b018-0798ad0ef293","Type":"ContainerStarted","Data":"e28b39f2d8e94818111732059efbfdd7c54f644b42fec06939788fdeb8634ef5"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.590514 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.591873 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" event={"ID":"13740318-83f1-4384-9b4c-b8de793773d3","Type":"ContainerStarted","Data":"a31b8dbb9a5d55b5bceb37286408c6ad2be82fc15d614dd8cf67d0f7cca9c4a2"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.592195 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.593735 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" event={"ID":"50a9326b-f577-4994-ba3a-28f1ffb1df6c","Type":"ContainerStarted","Data":"975272197f84c5466c4d018a6b6742517b134e3860534a1b3c133ea9db21b0ff"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.594211 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.596587 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh" event={"ID":"833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0","Type":"ContainerStarted","Data":"d6d2b448041610414ddaf63ec5e80c8192cf8700d26e37ed6ed5730b249be573"}
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.622042 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf" podStartSLOduration=4.952944581 podStartE2EDuration="14.622025014s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.430308829 +0000 UTC m=+853.314733939" lastFinishedPulling="2025-09-29 19:22:32.099389282 +0000 UTC m=+862.983814372" observedRunningTime="2025-09-29 19:22:34.617070009 +0000 UTC m=+865.501495169" watchObservedRunningTime="2025-09-29 19:22:34.622025014 +0000 UTC m=+865.506450104"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.639351 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm" podStartSLOduration=5.314338898 podStartE2EDuration="14.639295214s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.815564176 +0000 UTC m=+853.699989276" lastFinishedPulling="2025-09-29 19:22:32.140520492 +0000 UTC m=+863.024945592" observedRunningTime="2025-09-29 19:22:34.631514603 +0000 UTC m=+865.515939713" watchObservedRunningTime="2025-09-29 19:22:34.639295214 +0000 UTC m=+865.523720324"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.655174 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2" podStartSLOduration=5.463614746 podStartE2EDuration="14.655154997s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.907241394 +0000 UTC m=+853.791666494" lastFinishedPulling="2025-09-29 19:22:32.098781645 +0000 UTC m=+862.983206745" observedRunningTime="2025-09-29 19:22:34.647748095 +0000 UTC m=+865.532173225" watchObservedRunningTime="2025-09-29 19:22:34.655154997 +0000 UTC m=+865.539580107"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.679007 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d" podStartSLOduration=5.368138133 podStartE2EDuration="14.678987056s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.815512344 +0000 UTC m=+853.699937444" lastFinishedPulling="2025-09-29 19:22:32.126361267 +0000 UTC m=+863.010786367" observedRunningTime="2025-09-29 19:22:34.673809945 +0000 UTC m=+865.558235055" watchObservedRunningTime="2025-09-29 19:22:34.678987056 +0000 UTC m=+865.563412166"
Sep 29 19:22:34 crc kubenswrapper[4779]: I0929 19:22:34.703564 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd" podStartSLOduration=5.354122102 podStartE2EDuration="14.703545375s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.772517803 +0000 UTC m=+853.656942903" lastFinishedPulling="2025-09-29 19:22:32.121941056 +0000 UTC m=+863.006366176" observedRunningTime="2025-09-29 19:22:34.696067711 +0000 UTC m=+865.580492811" watchObservedRunningTime="2025-09-29 19:22:34.703545375 +0000 UTC m=+865.587970485"
Sep 29 19:22:35 crc kubenswrapper[4779]: I0929 19:22:35.607110 4779 generic.go:334] "Generic (PLEG): container finished" podID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerID="6fcc1620c1df9ef343de40bbdbe60d6ddd030375dbaa6a0731c358c501e01722" exitCode=0
Sep 29 19:22:35 crc kubenswrapper[4779]: I0929 19:22:35.609443 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerDied","Data":"6fcc1620c1df9ef343de40bbdbe60d6ddd030375dbaa6a0731c358c501e01722"}
Sep 29 19:22:36 crc kubenswrapper[4779]: I0929 19:22:36.617527 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerStarted","Data":"0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9"}
Sep 29 19:22:36 crc kubenswrapper[4779]: I0929 19:22:36.642784 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tmdsl" podStartSLOduration=6.158466814 podStartE2EDuration="8.64274959s" podCreationTimestamp="2025-09-29 19:22:28 +0000 UTC" firstStartedPulling="2025-09-29 19:22:33.575228853 +0000 UTC m=+864.459653953" lastFinishedPulling="2025-09-29 19:22:36.059511619 +0000 UTC m=+866.943936729" observedRunningTime="2025-09-29 19:22:36.640628852 +0000 UTC m=+867.525053952" watchObservedRunningTime="2025-09-29 19:22:36.64274959 +0000 UTC m=+867.527174740"
Sep 29 19:22:38 crc kubenswrapper[4779]: I0929 19:22:38.686926 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tmdsl"
Sep 29 19:22:38 crc kubenswrapper[4779]: I0929 19:22:38.687017 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tmdsl"
Sep 29 19:22:39 crc kubenswrapper[4779]: I0929 19:22:39.729367 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tmdsl" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="registry-server" probeResult="failure" output=<
Sep 29 19:22:39 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s
Sep 29 19:22:39 crc kubenswrapper[4779]: >
Sep 29 19:22:40 crc kubenswrapper[4779]: I0929 19:22:40.768765 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-l2cw6"
Sep 29 19:22:40 crc kubenswrapper[4779]: I0929 19:22:40.836478 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-txbkn"
Sep 29 19:22:40 crc kubenswrapper[4779]: I0929 19:22:40.859788 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kmgvf"
Sep 29 19:22:40 crc kubenswrapper[4779]: I0929 19:22:40.871015 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-9xr2s"
Sep 29 19:22:40 crc kubenswrapper[4779]: I0929 19:22:40.930570 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-8ndmd"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.038530 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-6x9z9"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.136054 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-94ldb"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.271059 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-46trm"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.308673 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-svxsl"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.337346 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-k26dh"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.372938 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-kvpnd"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.581004 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7pf7d"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.642595 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-dlgq2"
Sep 29 19:22:41 crc kubenswrapper[4779]: I0929 19:22:41.651363 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-crptq"
Sep 29 19:22:46 crc kubenswrapper[4779]: I0929 19:22:46.748359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" event={"ID":"5c9afdac-c252-4cd4-afb1-9d7fb43d86e1","Type":"ContainerStarted","Data":"f7f1e662bc4a4da72dd678815a1b1386275f4ef97ef695fda1e3ec9f086a3321"}
Sep 29 19:22:46 crc kubenswrapper[4779]: I0929 19:22:46.813438 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-wl7d2" podStartSLOduration=9.060186021 podStartE2EDuration="25.813402888s" podCreationTimestamp="2025-09-29 19:22:21 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.9206933 +0000 UTC m=+853.805118400" lastFinishedPulling="2025-09-29 19:22:39.673910167 +0000 UTC m=+870.558335267" observedRunningTime="2025-09-29 19:22:46.778156878 +0000 UTC m=+877.662582018" watchObservedRunningTime="2025-09-29 19:22:46.813402888 +0000 UTC m=+877.697828028"
Sep 29 19:22:48 crc kubenswrapper[4779]: I0929 19:22:48.752651 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tmdsl"
Sep 29 19:22:48 crc kubenswrapper[4779]: I0929 19:22:48.819513 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tmdsl"
Sep 29 19:22:49 crc kubenswrapper[4779]: I0929 19:22:49.007642 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"]
Sep 29 19:22:50 crc kubenswrapper[4779]: I0929 19:22:50.776078 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tmdsl" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="registry-server" containerID="cri-o://0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9" gracePeriod=2
Sep 29 19:22:51 crc kubenswrapper[4779]: I0929 19:22:51.783346 4779 generic.go:334] "Generic (PLEG): container finished" podID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerID="0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9" exitCode=0
podID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerID="0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9" exitCode=0 Sep 29 19:22:51 crc kubenswrapper[4779]: I0929 19:22:51.785668 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerDied","Data":"0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9"} Sep 29 19:22:51 crc kubenswrapper[4779]: E0929 19:22:51.906147 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c" Sep 29 19:22:51 crc kubenswrapper[4779]: E0929 19:22:51.906399 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wdnvw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5d889d78cf-fql2p_openstack-operators(df41de35-4c6d-4313-8ccb-19dcead38269): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:22:51 crc kubenswrapper[4779]: E0929 19:22:51.907619 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podUID="df41de35-4c6d-4313-8ccb-19dcead38269" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.340077 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.340491 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vhbwl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-tflz6_openstack-operators(cdc62734-f794-43fc-9af8-752098cdf316): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.341915 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podUID="cdc62734-f794-43fc-9af8-752098cdf316" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.841579 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying 
config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.841810 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gzh7q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-mtqwc_openstack-operators(1c3a147f-0c72-4889-80aa-8b53a0c9ea3f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:22:52 crc kubenswrapper[4779]: E0929 19:22:52.843428 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podUID="1c3a147f-0c72-4889-80aa-8b53a0c9ea3f" Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.106370 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.205758 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content\") pod \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") "
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.205807 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br47j\" (UniqueName: \"kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j\") pod \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") "
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.205883 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities\") pod \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\" (UID: \"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc\") "
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.207025 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities" (OuterVolumeSpecName: "utilities") pod "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" (UID: "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.219868 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j" (OuterVolumeSpecName: "kube-api-access-br47j") pod "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" (UID: "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc"). InnerVolumeSpecName "kube-api-access-br47j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.308002 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.308035 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br47j\" (UniqueName: \"kubernetes.io/projected/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-kube-api-access-br47j\") on node \"crc\" DevicePath \"\""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.319856 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" (UID: "5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.409884 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.801683 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" event={"ID":"38ef5cba-94db-4e0d-b2ad-290293848c65","Type":"ContainerStarted","Data":"910be5ca657c9302669fa334b953616bcaa23fc34900702eab8acfede74f3f0e"}
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.802405 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.805069 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" event={"ID":"49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4","Type":"ContainerStarted","Data":"4cd5818ee0e668789286ac1b7ee29c247ea750477b40022456d4da2693539320"}
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.805341 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.807735 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" event={"ID":"63554382-d024-4d43-b5c5-b31b80d47749","Type":"ContainerStarted","Data":"888ac062c77f123e0355f183512ae8354ca965bfdba0e6b2129cb9a7fc73ab53"}
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.807958 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.810042 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" event={"ID":"dfc872d3-d6c0-42af-9ab7-7695257d969f","Type":"ContainerStarted","Data":"8542a9320795051750c96b3197a276e05f8321c41963b65a1b195ed369dfe0fa"}
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.810264 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.812838 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmdsl" event={"ID":"5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc","Type":"ContainerDied","Data":"957152fa9f347121421ab88a97d2b427c14457f73ab869cdfe2b2a367e50aa35"}
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.812874 4779 scope.go:117] "RemoveContainer" containerID="0316939498d5602daf7f7b6939c68b83e400f655c19f28ec3aaebae5263922c9"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.812911 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tmdsl"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.831056 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" podStartSLOduration=3.791988408 podStartE2EDuration="33.83102902s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.844863834 +0000 UTC m=+853.729288934" lastFinishedPulling="2025-09-29 19:22:52.883904446 +0000 UTC m=+883.768329546" observedRunningTime="2025-09-29 19:22:53.829163729 +0000 UTC m=+884.713588829" watchObservedRunningTime="2025-09-29 19:22:53.83102902 +0000 UTC m=+884.715454130"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.840150 4779 scope.go:117] "RemoveContainer" containerID="6fcc1620c1df9ef343de40bbdbe60d6ddd030375dbaa6a0731c358c501e01722"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.861148 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" podStartSLOduration=3.871637259 podStartE2EDuration="33.86112686s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.839863308 +0000 UTC m=+853.724288408" lastFinishedPulling="2025-09-29 19:22:52.829352869 +0000 UTC m=+883.713778009" observedRunningTime="2025-09-29 19:22:53.855755934 +0000 UTC m=+884.740181074" watchObservedRunningTime="2025-09-29 19:22:53.86112686 +0000 UTC m=+884.745551970"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.882694 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"]
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.884093 4779 scope.go:117] "RemoveContainer" containerID="250e53a92e8f796144c2edcc62aee9c920b738634490d79273fafc852dce095a"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.895126 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tmdsl"]
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.943463 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" podStartSLOduration=4.01483731 podStartE2EDuration="33.943444803s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.920685 +0000 UTC m=+853.805110100" lastFinishedPulling="2025-09-29 19:22:52.849292503 +0000 UTC m=+883.733717593" observedRunningTime="2025-09-29 19:22:53.939813134 +0000 UTC m=+884.824238244" watchObservedRunningTime="2025-09-29 19:22:53.943444803 +0000 UTC m=+884.827869923"
Sep 29 19:22:53 crc kubenswrapper[4779]: I0929 19:22:53.946301 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" podStartSLOduration=3.937796782 podStartE2EDuration="33.946292661s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.840158506 +0000 UTC m=+853.724583606" lastFinishedPulling="2025-09-29 19:22:52.848654345 +0000 UTC m=+883.733079485" observedRunningTime="2025-09-29 19:22:53.911988686 +0000 UTC m=+884.796413776" watchObservedRunningTime="2025-09-29 19:22:53.946292661 +0000 UTC m=+884.830717781"
Sep 29 19:22:55 crc kubenswrapper[4779]: I0929 19:22:55.780410 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" path="/var/lib/kubelet/pods/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc/volumes"
podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" path="/var/lib/kubelet/pods/5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc/volumes" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.533184 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:00 crc kubenswrapper[4779]: E0929 19:23:00.534236 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="extract-utilities" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.534259 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="extract-utilities" Sep 29 19:23:00 crc kubenswrapper[4779]: E0929 19:23:00.534302 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="extract-content" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.534385 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="extract-content" Sep 29 19:23:00 crc kubenswrapper[4779]: E0929 19:23:00.534434 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="registry-server" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.534447 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="registry-server" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.534702 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cf2ee22-29b0-4662-a23b-c02ef8ddb9dc" containerName="registry-server" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.537375 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.553248 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.739002 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzr5l\" (UniqueName: \"kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.739855 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.740104 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.840827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.840875 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzr5l\" (UniqueName: \"kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.840932 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.841372 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.841475 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.859724 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pzr5l\" (UniqueName: \"kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l\") pod \"community-operators-tc6wm\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:00 crc kubenswrapper[4779]: I0929 19:23:00.867871 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.357486 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.480779 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-f7xwn" Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.618912 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-8fjvn" Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.622590 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-9jgwn" Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.812081 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-54f6z" Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.927170 4779 generic.go:334] "Generic (PLEG): container finished" podID="12759fbd-4f00-48d9-aa92-161551c13713" containerID="fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554" exitCode=0 Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.927224 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerDied","Data":"fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554"} Sep 29 19:23:01 crc kubenswrapper[4779]: I0929 19:23:01.927255 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerStarted","Data":"891344eec4ce754a0de2cee7cc2d68fd26cb083ca348a1e7611cc727a0e0bc79"} Sep 29 19:23:02 crc kubenswrapper[4779]: E0929 19:23:02.769236 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podUID="df41de35-4c6d-4313-8ccb-19dcead38269" Sep 29 19:23:02 crc kubenswrapper[4779]: I0929 19:23:02.938805 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerStarted","Data":"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a"} Sep 29 19:23:03 crc kubenswrapper[4779]: E0929 19:23:03.768623 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podUID="cdc62734-f794-43fc-9af8-752098cdf316" Sep 29 19:23:03 crc kubenswrapper[4779]: E0929 19:23:03.768775 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podUID="1c3a147f-0c72-4889-80aa-8b53a0c9ea3f" Sep 29 19:23:03 crc kubenswrapper[4779]: I0929 19:23:03.952892 4779 generic.go:334] "Generic (PLEG): container finished" podID="12759fbd-4f00-48d9-aa92-161551c13713" containerID="c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a" exitCode=0 Sep 29 19:23:03 crc kubenswrapper[4779]: I0929 19:23:03.952982 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerDied","Data":"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a"} Sep 29 19:23:04 crc kubenswrapper[4779]: I0929 19:23:04.963198 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerStarted","Data":"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99"} Sep 29 19:23:04 crc kubenswrapper[4779]: I0929 19:23:04.988484 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tc6wm" podStartSLOduration=2.553090841 podStartE2EDuration="4.988466765s" podCreationTimestamp="2025-09-29 19:23:00 +0000 UTC" firstStartedPulling="2025-09-29 19:23:01.928676658 +0000 UTC m=+892.813101758" lastFinishedPulling="2025-09-29 19:23:04.364052562 +0000 UTC m=+895.248477682" observedRunningTime="2025-09-29 19:23:04.984188668 +0000 UTC m=+895.868613788" watchObservedRunningTime="2025-09-29 19:23:04.988466765 +0000 UTC m=+895.872891875" Sep 29 19:23:10 crc kubenswrapper[4779]: I0929 19:23:10.868024 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:10 crc kubenswrapper[4779]: I0929 19:23:10.869647 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:10 crc kubenswrapper[4779]: I0929 19:23:10.932007 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:11 crc kubenswrapper[4779]: I0929 19:23:11.080930 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:12 crc kubenswrapper[4779]: I0929 19:23:12.300768 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.048253 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tc6wm" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="registry-server" 
containerID="cri-o://aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99" gracePeriod=2 Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.550691 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.654580 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzr5l\" (UniqueName: \"kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l\") pod \"12759fbd-4f00-48d9-aa92-161551c13713\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.654697 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities\") pod \"12759fbd-4f00-48d9-aa92-161551c13713\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.654730 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content\") pod \"12759fbd-4f00-48d9-aa92-161551c13713\" (UID: \"12759fbd-4f00-48d9-aa92-161551c13713\") " Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.656368 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities" (OuterVolumeSpecName: "utilities") pod "12759fbd-4f00-48d9-aa92-161551c13713" (UID: "12759fbd-4f00-48d9-aa92-161551c13713"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.663902 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l" (OuterVolumeSpecName: "kube-api-access-pzr5l") pod "12759fbd-4f00-48d9-aa92-161551c13713" (UID: "12759fbd-4f00-48d9-aa92-161551c13713"). InnerVolumeSpecName "kube-api-access-pzr5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.737918 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12759fbd-4f00-48d9-aa92-161551c13713" (UID: "12759fbd-4f00-48d9-aa92-161551c13713"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.755976 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzr5l\" (UniqueName: \"kubernetes.io/projected/12759fbd-4f00-48d9-aa92-161551c13713-kube-api-access-pzr5l\") on node \"crc\" DevicePath \"\"" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.756013 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.756028 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12759fbd-4f00-48d9-aa92-161551c13713-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:23:14 crc kubenswrapper[4779]: I0929 19:23:14.767876 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.063967 4779 generic.go:334] "Generic (PLEG): container finished" podID="12759fbd-4f00-48d9-aa92-161551c13713" containerID="aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99" exitCode=0 Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.064026 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerDied","Data":"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99"} Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.064068 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tc6wm" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.064090 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc6wm" event={"ID":"12759fbd-4f00-48d9-aa92-161551c13713","Type":"ContainerDied","Data":"891344eec4ce754a0de2cee7cc2d68fd26cb083ca348a1e7611cc727a0e0bc79"} Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.064116 4779 scope.go:117] "RemoveContainer" containerID="aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.100643 4779 scope.go:117] "RemoveContainer" containerID="c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.121401 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.127417 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tc6wm"] Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.157423 4779 scope.go:117] "RemoveContainer" containerID="fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.182623 4779 scope.go:117] "RemoveContainer" containerID="aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99" Sep 29 19:23:15 crc kubenswrapper[4779]: E0929 19:23:15.183345 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99\": container with ID starting with aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99 not 
found: ID does not exist" containerID="aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.183391 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99"} err="failed to get container status \"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99\": rpc error: code = NotFound desc = could not find container \"aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99\": container with ID starting with aac18298c24c8d6ca8a4be393ac0f20934c62bfe7fd22ad0668fcbbd1e463b99 not found: ID does not exist" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.183417 4779 scope.go:117] "RemoveContainer" containerID="c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a" Sep 29 19:23:15 crc kubenswrapper[4779]: E0929 19:23:15.184015 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a\": container with ID starting with c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a not found: ID does not exist" containerID="c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.184072 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a"} err="failed to get container status \"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a\": rpc error: code = NotFound desc = could not find container \"c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a\": container with ID starting with c83e1395678941116867ae4e9671761f4bf4d9e64a49045923b257732af4d85a not found: ID does not exist" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.184115 4779 scope.go:117] "RemoveContainer" containerID="fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554" Sep 29 19:23:15 crc kubenswrapper[4779]: E0929 19:23:15.184580 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554\": container with ID starting with fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554 not found: ID does not exist" containerID="fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.184640 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554"} err="failed to get container status \"fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554\": rpc error: code = NotFound desc = could not find container \"fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554\": container with ID starting with fd1b471828def16347cefdd32f2b973afef7c90a1a4c36b8c786010a5ab92554 not found: ID does not exist" Sep 29 19:23:15 crc kubenswrapper[4779]: I0929 19:23:15.782038 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12759fbd-4f00-48d9-aa92-161551c13713" path="/var/lib/kubelet/pods/12759fbd-4f00-48d9-aa92-161551c13713/volumes" Sep 29 19:23:16 crc kubenswrapper[4779]: I0929 19:23:16.076793 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" event={"ID":"df41de35-4c6d-4313-8ccb-19dcead38269","Type":"ContainerStarted","Data":"e06b86136b601d9a2659c3aabf8e5caf5e9543e85986114ff5a7ca2dab0f3402"} Sep 29 19:23:16 crc kubenswrapper[4779]: I0929 19:23:16.077149 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:23:16 crc kubenswrapper[4779]: I0929 19:23:16.107112 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" podStartSLOduration=3.655319106 podStartE2EDuration="56.107087641s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.844400801 +0000 UTC m=+853.728825901" lastFinishedPulling="2025-09-29 19:23:15.296169326 +0000 UTC m=+906.180594436" observedRunningTime="2025-09-29 19:23:16.101216581 +0000 UTC m=+906.985641721" watchObservedRunningTime="2025-09-29 19:23:16.107087641 +0000 UTC m=+906.991512771" Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.098468 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" event={"ID":"1c3a147f-0c72-4889-80aa-8b53a0c9ea3f","Type":"ContainerStarted","Data":"755861fef280ded4dfbe85532be99b28eab3095331603389842840c16e3187b3"} Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.099230 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.101879 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" event={"ID":"cdc62734-f794-43fc-9af8-752098cdf316","Type":"ContainerStarted","Data":"acb98f602cbf860eb99a344c06985e80164abd3718287d650d6f8de7a9f837ae"} Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.103684 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.135912 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" podStartSLOduration=3.724560803 podStartE2EDuration="58.135888718s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.844210786 +0000 UTC m=+853.728635886" lastFinishedPulling="2025-09-29 19:23:17.255538691 +0000 UTC m=+908.139963801" observedRunningTime="2025-09-29 19:23:18.125549996 +0000 UTC m=+909.009975136" watchObservedRunningTime="2025-09-29 19:23:18.135888718 +0000 UTC m=+909.020313848" Sep 29 19:23:18 crc kubenswrapper[4779]: I0929 19:23:18.158062 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" podStartSLOduration=3.804935312 podStartE2EDuration="58.158031511s" podCreationTimestamp="2025-09-29 19:22:20 +0000 UTC" firstStartedPulling="2025-09-29 19:22:22.846851248 +0000 UTC m=+853.731276348" lastFinishedPulling="2025-09-29 19:23:17.199947407 +0000 UTC m=+908.084372547" observedRunningTime="2025-09-29 19:23:18.150220128 +0000 UTC m=+909.034645258" watchObservedRunningTime="2025-09-29 19:23:18.158031511 +0000 UTC m=+909.042456651" Sep 29 19:23:20 crc 
kubenswrapper[4779]: I0929 19:23:20.897459 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-fql2p" Sep 29 19:23:31 crc kubenswrapper[4779]: I0929 19:23:31.465848 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-mtqwc" Sep 29 19:23:31 crc kubenswrapper[4779]: I0929 19:23:31.633109 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-tflz6" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.854850 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:23:47 crc kubenswrapper[4779]: E0929 19:23:47.855753 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="extract-utilities" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.855771 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="extract-utilities" Sep 29 19:23:47 crc kubenswrapper[4779]: E0929 19:23:47.855788 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="registry-server" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.855798 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="registry-server" Sep 29 19:23:47 crc kubenswrapper[4779]: E0929 19:23:47.855817 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="extract-content" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.855825 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="extract-content" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.856008 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="12759fbd-4f00-48d9-aa92-161551c13713" containerName="registry-server" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.856915 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.860610 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.860910 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.861025 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-fztlb" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.861139 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.871743 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.916727 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.917773 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.924174 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.935370 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.972478 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.972543 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmjng\" (UniqueName: \"kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.972580 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.972638 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pksg\" (UniqueName: \"kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:47 crc kubenswrapper[4779]: I0929 19:23:47.972707 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.073758 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.073850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.073886 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmjng\" (UniqueName: \"kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" 
Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.073915 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.073961 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pksg\" (UniqueName: \"kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.075296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.075388 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.075517 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.093072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmjng\" (UniqueName: \"kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng\") pod \"dnsmasq-dns-675f4bcbfc-hcgm5\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.096588 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pksg\" (UniqueName: \"kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg\") pod \"dnsmasq-dns-78dd6ddcc-j95zd\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.181910 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.232435 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.465177 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:23:48 crc kubenswrapper[4779]: I0929 19:23:48.506239 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:23:49 crc kubenswrapper[4779]: I0929 19:23:49.373294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" event={"ID":"4f375efe-9d81-4148-8c26-b64c2c6705fa","Type":"ContainerStarted","Data":"304e55ab405748fb7b7bd49dfd549afb1e9362e4ee2d954dc5bbe02c4b30c0f3"} Sep 29 19:23:49 crc kubenswrapper[4779]: I0929 19:23:49.375177 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" event={"ID":"7f1776a9-9887-40a8-8518-73b8c6701186","Type":"ContainerStarted","Data":"1738acd744de4793feba92d2f91a05516e515396c2fa8b24f39449ffb6810d57"} Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.494719 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.514245 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.515538 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.523673 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.613634 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzx45\" (UniqueName: \"kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.613713 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.613747 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.714723 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.714778 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc\") pod 
\"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.714890 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzx45\" (UniqueName: \"kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.715819 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.715819 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.748166 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzx45\" (UniqueName: \"kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45\") pod \"dnsmasq-dns-666b6646f7-79mzc\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.799353 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.818175 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.823991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.828580 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.840159 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.923270 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.927494 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hmft\" (UniqueName: \"kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:50 crc kubenswrapper[4779]: I0929 19:23:50.927647 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.029302 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.029400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.029429 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hmft\" (UniqueName: \"kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.030236 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.030287 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.046590 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hmft\" (UniqueName: \"kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft\") pod \"dnsmasq-dns-57d769cc4f-wn9td\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.149077 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.307220 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.649966 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.651062 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.654122 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.654773 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6clwg" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.657696 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.661915 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.661923 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.661941 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.661987 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.664912 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737501 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpj5n\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737537 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737651 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737757 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " 
pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737811 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737912 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.737981 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.738038 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.738067 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.738090 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.738118 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839592 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839659 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839686 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839761 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpj5n\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839776 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839796 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839819 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839842 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.839866 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.840692 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " 
pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.840930 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.845028 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.845347 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.845549 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.848803 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.850028 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.850890 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.856178 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpj5n\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.856623 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.857614 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info\") 
pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.859045 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " pod="openstack/rabbitmq-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.938012 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.940153 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.946285 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.946528 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.947179 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.947241 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-44wqk" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.947417 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.947495 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.947608 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.954971 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:23:51 crc kubenswrapper[4779]: I0929 19:23:51.980148 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043131 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043168 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgfgg\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043232 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043276 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043342 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043371 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043413 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043438 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.043477 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144537 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144616 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144691 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144723 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144765 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144791 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: 
I0929 19:23:52.144871 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144895 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144924 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgfgg\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.144953 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.145473 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.145747 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.145946 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.146412 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.147050 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.147816 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.149798 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.166129 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.166906 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.167473 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.170082 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgfgg\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.173693 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:52 crc kubenswrapper[4779]: I0929 19:23:52.273928 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.861679 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.863355 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.865530 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-x6465" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.866240 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.866532 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.866713 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.867447 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.877655 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.879526 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971524 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971575 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971699 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cfwh\" (UniqueName: \"kubernetes.io/projected/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kube-api-access-6cfwh\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971794 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971870 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " 
pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.971968 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.972001 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:53 crc kubenswrapper[4779]: I0929 19:23:53.972027 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-secrets\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073672 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073734 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073766 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073787 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cfwh\" (UniqueName: \"kubernetes.io/projected/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kube-api-access-6cfwh\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073834 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073863 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073881 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.073898 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-secrets\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.074215 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.074811 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.074909 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-default\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.075063 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.075819 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.080001 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-secrets\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.080026 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: 
I0929 19:23:54.082976 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.089794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cfwh\" (UniqueName: \"kubernetes.io/projected/f5cf224b-f179-4c3c-bc69-5f3d448aca1d-kube-api-access-6cfwh\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.091528 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"f5cf224b-f179-4c3c-bc69-5f3d448aca1d\") " pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.179937 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.746867 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.748161 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.753547 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.755273 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.755977 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-qf658" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.758430 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.777004 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.782904 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783069 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31200251-0f84-4946-88fb-276aa79589d9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783168 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-secrets\") pod \"openstack-cell1-galera-0\" (UID: 
\"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783209 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783248 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783306 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnsnr\" (UniqueName: \"kubernetes.io/projected/31200251-0f84-4946-88fb-276aa79589d9-kube-api-access-tnsnr\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783371 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.783433 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884652 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31200251-0f84-4946-88fb-276aa79589d9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884774 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884823 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884858 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884895 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884926 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnsnr\" (UniqueName: \"kubernetes.io/projected/31200251-0f84-4946-88fb-276aa79589d9-kube-api-access-tnsnr\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.884972 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.885033 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.885094 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.885764 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.885847 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31200251-0f84-4946-88fb-276aa79589d9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.887041 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" 
Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.887487 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.891815 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.892417 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.892312 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31200251-0f84-4946-88fb-276aa79589d9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.893473 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31200251-0f84-4946-88fb-276aa79589d9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.911964 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnsnr\" (UniqueName: \"kubernetes.io/projected/31200251-0f84-4946-88fb-276aa79589d9-kube-api-access-tnsnr\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:54 crc kubenswrapper[4779]: I0929 19:23:54.930605 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"31200251-0f84-4946-88fb-276aa79589d9\") " pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.069030 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.254290 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.255493 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.260484 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.260810 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.261033 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-cdnjg" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.268184 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.292084 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kolla-config\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.292145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.292184 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-config-data\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.292204 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.292228 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j69sm\" (UniqueName: \"kubernetes.io/projected/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kube-api-access-j69sm\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.393530 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kolla-config\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.393590 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.393615 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-config-data\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.393633 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.393659 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j69sm\" (UniqueName: \"kubernetes.io/projected/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kube-api-access-j69sm\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.394637 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kolla-config\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.395626 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8de5fc49-d446-41aa-aa5e-d32fd04a281e-config-data\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.399878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.400022 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8de5fc49-d446-41aa-aa5e-d32fd04a281e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.413172 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j69sm\" (UniqueName: \"kubernetes.io/projected/8de5fc49-d446-41aa-aa5e-d32fd04a281e-kube-api-access-j69sm\") pod \"memcached-0\" (UID: \"8de5fc49-d446-41aa-aa5e-d32fd04a281e\") " pod="openstack/memcached-0" Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.431574 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" event={"ID":"7687e8ef-621c-432c-8153-dbc11707461f","Type":"ContainerStarted","Data":"895bb25f895aa3004f41f82cfd05c8e848078250c34fd757828fec8cbd3d98cc"} Sep 29 19:23:55 crc kubenswrapper[4779]: I0929 19:23:55.575925 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.269618 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.270806 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.272950 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-x59cr" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.280568 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.424963 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcck6\" (UniqueName: \"kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6\") pod \"kube-state-metrics-0\" (UID: \"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c\") " pod="openstack/kube-state-metrics-0" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.527562 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcck6\" (UniqueName: \"kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6\") pod \"kube-state-metrics-0\" (UID: \"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c\") " pod="openstack/kube-state-metrics-0" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.545661 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcck6\" (UniqueName: \"kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6\") pod \"kube-state-metrics-0\" (UID: \"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c\") " pod="openstack/kube-state-metrics-0" Sep 29 19:23:57 crc kubenswrapper[4779]: I0929 19:23:57.586442 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.868489 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.871692 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.875543 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.876815 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.876979 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xj7zw" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.877908 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.882665 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:24:01 crc kubenswrapper[4779]: I0929 19:24:01.885486 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004345 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004401 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004455 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004500 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004528 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.004553 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.006265 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-zqzwf\" (UniqueName: \"kubernetes.io/projected/79136f36-d427-488e-81e2-ef55c73ee91a-kube-api-access-zqzwf\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.006378 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-config\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108008 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108429 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108482 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108535 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108605 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqzwf\" (UniqueName: \"kubernetes.io/projected/79136f36-d427-488e-81e2-ef55c73ee91a-kube-api-access-zqzwf\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108669 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-config\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108783 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.108824 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc 
kubenswrapper[4779]: I0929 19:24:02.108950 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.113908 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.114442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-config\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.114858 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79136f36-d427-488e-81e2-ef55c73ee91a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.118032 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.118187 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.124471 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79136f36-d427-488e-81e2-ef55c73ee91a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.135060 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqzwf\" (UniqueName: \"kubernetes.io/projected/79136f36-d427-488e-81e2-ef55c73ee91a-kube-api-access-zqzwf\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.148566 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79136f36-d427-488e-81e2-ef55c73ee91a\") " pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.214035 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.242084 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-rwqzd"] Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.242990 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.246982 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-lqlfs" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.248866 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.255989 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rwqzd"] Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.261707 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.262720 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-lx5dn"] Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.264386 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.273846 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-lx5dn"] Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414546 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwslp\" (UniqueName: \"kubernetes.io/projected/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-kube-api-access-lwslp\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414589 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9771c712-00ce-4dcf-ab04-7b6893c8725c-scripts\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414615 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-scripts\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-etc-ovs\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414672 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-lib\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc 
kubenswrapper[4779]: I0929 19:24:02.414797 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-run\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-log-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414909 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-ovn-controller-tls-certs\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.414934 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-log\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.415038 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-combined-ca-bundle\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.415107 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.415148 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.415177 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmfrk\" (UniqueName: \"kubernetes.io/projected/9771c712-00ce-4dcf-ab04-7b6893c8725c-kube-api-access-rmfrk\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516381 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-scripts\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516427 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-etc-ovs\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516470 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-lib\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516494 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-run\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516521 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-ovn-controller-tls-certs\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516539 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-log-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516556 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-log\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-combined-ca-bundle\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516619 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516657 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmfrk\" (UniqueName: \"kubernetes.io/projected/9771c712-00ce-4dcf-ab04-7b6893c8725c-kube-api-access-rmfrk\") pod 
\"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516679 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwslp\" (UniqueName: \"kubernetes.io/projected/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-kube-api-access-lwslp\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.516694 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9771c712-00ce-4dcf-ab04-7b6893c8725c-scripts\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.518902 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-run\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.518910 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.518957 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-log\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.519052 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-run-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.519047 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-var-lib\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.519109 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9771c712-00ce-4dcf-ab04-7b6893c8725c-var-log-ovn\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.519153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-etc-ovs\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.519299 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/9771c712-00ce-4dcf-ab04-7b6893c8725c-scripts\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.521578 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-scripts\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.525434 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-combined-ca-bundle\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.529908 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9771c712-00ce-4dcf-ab04-7b6893c8725c-ovn-controller-tls-certs\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.537153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmfrk\" (UniqueName: \"kubernetes.io/projected/9771c712-00ce-4dcf-ab04-7b6893c8725c-kube-api-access-rmfrk\") pod \"ovn-controller-rwqzd\" (UID: \"9771c712-00ce-4dcf-ab04-7b6893c8725c\") " pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.543543 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwslp\" (UniqueName: \"kubernetes.io/projected/8cebec61-5d6f-4bd2-a9e7-61f0b0f74751-kube-api-access-lwslp\") pod \"ovn-controller-ovs-lx5dn\" (UID: \"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751\") " pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.613345 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:02 crc kubenswrapper[4779]: I0929 19:24:02.624637 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.299783 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.301547 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.305827 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.305978 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.306142 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-qbvvq" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.306285 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.306578 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.307205 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.314152 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.314348 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7pksg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-j95zd_openstack(4f375efe-9d81-4148-8c26-b64c2c6705fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.314444 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qmjng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-hcgm5_openstack(7f1776a9-9887-40a8-8518-73b8c6701186): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.315949 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" podUID="7f1776a9-9887-40a8-8518-73b8c6701186" Sep 29 19:24:04 crc kubenswrapper[4779]: E0929 19:24:04.315981 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" podUID="4f375efe-9d81-4148-8c26-b64c2c6705fa" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.456498 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5xwz\" (UniqueName: \"kubernetes.io/projected/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-kube-api-access-f5xwz\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.456851 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-config\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.456898 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " 
pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.456923 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.456944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.457009 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.457033 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.457065 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560129 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5xwz\" (UniqueName: \"kubernetes.io/projected/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-kube-api-access-f5xwz\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560227 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-config\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560257 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560273 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560289 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560347 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560368 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.560391 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.564560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-config\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.567158 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.568225 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.568860 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.569513 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.570507 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.570655 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.588479 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5xwz\" (UniqueName: \"kubernetes.io/projected/bbcdb00b-849b-40c6-9e53-2c751dbd11dd-kube-api-access-f5xwz\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.673040 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"bbcdb00b-849b-40c6-9e53-2c751dbd11dd\") " pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.931071 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:04 crc kubenswrapper[4779]: I0929 19:24:04.949713 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.001274 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.070847 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc\") pod \"4f375efe-9d81-4148-8c26-b64c2c6705fa\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.070905 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pksg\" (UniqueName: \"kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg\") pod \"4f375efe-9d81-4148-8c26-b64c2c6705fa\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.070999 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config\") pod \"4f375efe-9d81-4148-8c26-b64c2c6705fa\" (UID: \"4f375efe-9d81-4148-8c26-b64c2c6705fa\") " Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.071389 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4f375efe-9d81-4148-8c26-b64c2c6705fa" (UID: "4f375efe-9d81-4148-8c26-b64c2c6705fa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.071869 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config" (OuterVolumeSpecName: "config") pod "4f375efe-9d81-4148-8c26-b64c2c6705fa" (UID: "4f375efe-9d81-4148-8c26-b64c2c6705fa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.074247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg" (OuterVolumeSpecName: "kube-api-access-7pksg") pod "4f375efe-9d81-4148-8c26-b64c2c6705fa" (UID: "4f375efe-9d81-4148-8c26-b64c2c6705fa"). InnerVolumeSpecName "kube-api-access-7pksg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.172688 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config\") pod \"7f1776a9-9887-40a8-8518-73b8c6701186\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.172767 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmjng\" (UniqueName: \"kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng\") pod \"7f1776a9-9887-40a8-8518-73b8c6701186\" (UID: \"7f1776a9-9887-40a8-8518-73b8c6701186\") " Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.173130 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.173143 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f375efe-9d81-4148-8c26-b64c2c6705fa-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.173152 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pksg\" (UniqueName: \"kubernetes.io/projected/4f375efe-9d81-4148-8c26-b64c2c6705fa-kube-api-access-7pksg\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.173336 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config" (OuterVolumeSpecName: "config") pod "7f1776a9-9887-40a8-8518-73b8c6701186" (UID: "7f1776a9-9887-40a8-8518-73b8c6701186"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.177967 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng" (OuterVolumeSpecName: "kube-api-access-qmjng") pod "7f1776a9-9887-40a8-8518-73b8c6701186" (UID: "7f1776a9-9887-40a8-8518-73b8c6701186"). InnerVolumeSpecName "kube-api-access-qmjng". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.275019 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1776a9-9887-40a8-8518-73b8c6701186-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.275049 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmjng\" (UniqueName: \"kubernetes.io/projected/7f1776a9-9887-40a8-8518-73b8c6701186-kube-api-access-qmjng\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:05 crc kubenswrapper[4779]: W0929 19:24:05.409591 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31200251_0f84_4946_88fb_276aa79589d9.slice/crio-3acc7de8f9bf67267b598ff3497da709c2dc562c07c1efb013cad3ac7b90e9b4 WatchSource:0}: Error finding container 3acc7de8f9bf67267b598ff3497da709c2dc562c07c1efb013cad3ac7b90e9b4: Status 404 returned error can't find the container with id 3acc7de8f9bf67267b598ff3497da709c2dc562c07c1efb013cad3ac7b90e9b4 Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.411994 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: W0929 19:24:05.413586 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod549de7a5_30db_464d_bd6b_a6dcca25691d.slice/crio-ef04572ec2f4ad5b945e0078c290d645fa64f8c318b652de8427d8752806aa99 WatchSource:0}: Error finding container ef04572ec2f4ad5b945e0078c290d645fa64f8c318b652de8427d8752806aa99: Status 404 returned error can't find the container with id ef04572ec2f4ad5b945e0078c290d645fa64f8c318b652de8427d8752806aa99 Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.428439 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.441817 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: W0929 19:24:05.457042 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92364a65_9a43_4f86_8798_daf475ca26e6.slice/crio-9a44d0478a077dd18afd4b9ef04732b1d4a01bcac177419bbec663cb3eba95e3 WatchSource:0}: Error finding container 9a44d0478a077dd18afd4b9ef04732b1d4a01bcac177419bbec663cb3eba95e3: Status 404 returned error can't find the container with id 9a44d0478a077dd18afd4b9ef04732b1d4a01bcac177419bbec663cb3eba95e3 Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.457434 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.463808 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: W0929 19:24:05.466414 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod523fd020_2e02_4807_93b8_82ecbd1152eb.slice/crio-0a6a66e89e42551aec18d9db4a532cea2744f4c2a4d82854fcb8ac64bbc284b7 WatchSource:0}: Error finding container 0a6a66e89e42551aec18d9db4a532cea2744f4c2a4d82854fcb8ac64bbc284b7: Status 404 returned error can't find the container with id 0a6a66e89e42551aec18d9db4a532cea2744f4c2a4d82854fcb8ac64bbc284b7 Sep 29 19:24:05 crc 
kubenswrapper[4779]: I0929 19:24:05.470669 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.513514 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rwqzd"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.520233 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.524958 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" event={"ID":"4f375efe-9d81-4148-8c26-b64c2c6705fa","Type":"ContainerDied","Data":"304e55ab405748fb7b7bd49dfd549afb1e9362e4ee2d954dc5bbe02c4b30c0f3"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.525107 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-j95zd" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.530459 4779 generic.go:334] "Generic (PLEG): container finished" podID="7687e8ef-621c-432c-8153-dbc11707461f" containerID="47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4" exitCode=0 Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.530560 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" event={"ID":"7687e8ef-621c-432c-8153-dbc11707461f","Type":"ContainerDied","Data":"47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.535571 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"8de5fc49-d446-41aa-aa5e-d32fd04a281e","Type":"ContainerStarted","Data":"8cb7ac2c1d0551f551a730908db76d88668e31b1bb918e0f68bff0a59b92554d"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.538630 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31200251-0f84-4946-88fb-276aa79589d9","Type":"ContainerStarted","Data":"3acc7de8f9bf67267b598ff3497da709c2dc562c07c1efb013cad3ac7b90e9b4"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.542769 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" event={"ID":"92364a65-9a43-4f86-8798-daf475ca26e6","Type":"ContainerStarted","Data":"9a44d0478a077dd18afd4b9ef04732b1d4a01bcac177419bbec663cb3eba95e3"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.550397 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c","Type":"ContainerStarted","Data":"2101be65d6b9c39d2907e20c2c18e9fa2bffc773d36a6108e9cfec6997234996"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.564104 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerStarted","Data":"0a6a66e89e42551aec18d9db4a532cea2744f4c2a4d82854fcb8ac64bbc284b7"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.573216 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" event={"ID":"7f1776a9-9887-40a8-8518-73b8c6701186","Type":"ContainerDied","Data":"1738acd744de4793feba92d2f91a05516e515396c2fa8b24f39449ffb6810d57"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.574660 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-hcgm5" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.579989 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerStarted","Data":"ef04572ec2f4ad5b945e0078c290d645fa64f8c318b652de8427d8752806aa99"} Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.613825 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.630542 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.640421 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-j95zd"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.661279 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-lx5dn"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.669703 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.676484 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-hcgm5"] Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.774676 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f375efe-9d81-4148-8c26-b64c2c6705fa" path="/var/lib/kubelet/pods/4f375efe-9d81-4148-8c26-b64c2c6705fa/volumes" Sep 29 19:24:05 crc kubenswrapper[4779]: I0929 19:24:05.775377 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f1776a9-9887-40a8-8518-73b8c6701186" path="/var/lib/kubelet/pods/7f1776a9-9887-40a8-8518-73b8c6701186/volumes" Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.223911 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 19:24:06 crc kubenswrapper[4779]: W0929 19:24:06.336008 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbcdb00b_849b_40c6_9e53_2c751dbd11dd.slice/crio-458def5169f00c09c84f50255337c8b5d9341330f491426ef9a5e36477fc22f9 WatchSource:0}: Error finding container 458def5169f00c09c84f50255337c8b5d9341330f491426ef9a5e36477fc22f9: Status 404 returned error can't find the container with id 458def5169f00c09c84f50255337c8b5d9341330f491426ef9a5e36477fc22f9 Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.587894 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5cf224b-f179-4c3c-bc69-5f3d448aca1d","Type":"ContainerStarted","Data":"a2996810b706dc97a7a22fd785f55bd68cc1031f5085b60bcc4de8a46af746a5"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.589668 4779 generic.go:334] "Generic (PLEG): container finished" podID="92364a65-9a43-4f86-8798-daf475ca26e6" containerID="ea3c071d5a29a22ec6c4c8677e08f1ce3b70883db3e673f8ab1a89d6e80d6d2f" exitCode=0 Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.589756 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" event={"ID":"92364a65-9a43-4f86-8798-daf475ca26e6","Type":"ContainerDied","Data":"ea3c071d5a29a22ec6c4c8677e08f1ce3b70883db3e673f8ab1a89d6e80d6d2f"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.593237 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-666b6646f7-79mzc" event={"ID":"7687e8ef-621c-432c-8153-dbc11707461f","Type":"ContainerStarted","Data":"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.593900 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.597989 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79136f36-d427-488e-81e2-ef55c73ee91a","Type":"ContainerStarted","Data":"4d0957e4fbbdf13fd36d371c20d9ff44e654a7cdaf7e519561d0331289a830b4"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.599166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bbcdb00b-849b-40c6-9e53-2c751dbd11dd","Type":"ContainerStarted","Data":"458def5169f00c09c84f50255337c8b5d9341330f491426ef9a5e36477fc22f9"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.600990 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rwqzd" event={"ID":"9771c712-00ce-4dcf-ab04-7b6893c8725c","Type":"ContainerStarted","Data":"4432c815d7a1cd374e08f9140f6c7326d877a1a35dd8442b49a76e01a589e85f"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.606580 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-lx5dn" event={"ID":"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751","Type":"ContainerStarted","Data":"5481671756b105cc870e2f618e2b5416946a8073a8220e75858369b87cd4c258"} Sep 29 19:24:06 crc kubenswrapper[4779]: I0929 19:24:06.625563 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" podStartSLOduration=7.107594944 podStartE2EDuration="16.62554654s" podCreationTimestamp="2025-09-29 19:23:50 +0000 UTC" firstStartedPulling="2025-09-29 19:23:55.074339407 +0000 UTC m=+945.958764507" lastFinishedPulling="2025-09-29 19:24:04.592291003 +0000 UTC m=+955.476716103" observedRunningTime="2025-09-29 19:24:06.622124537 +0000 UTC m=+957.506549637" watchObservedRunningTime="2025-09-29 19:24:06.62554654 +0000 UTC m=+957.509971640" Sep 29 19:24:10 crc kubenswrapper[4779]: I0929 19:24:10.843534 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.663893 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-lx5dn" event={"ID":"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751","Type":"ContainerStarted","Data":"7591e8ea3efb0efa414a17639ae7b40a365f18ef6bb0d7ef1fa110158f198990"} Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.666700 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5cf224b-f179-4c3c-bc69-5f3d448aca1d","Type":"ContainerStarted","Data":"0fb980298cd73136188a0518c2f5df69e9dd3d7c353d543ee36dac2bd6dfbf46"} Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.669187 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" event={"ID":"92364a65-9a43-4f86-8798-daf475ca26e6","Type":"ContainerStarted","Data":"49b769762112f2c598db9b5213e030193bfdd8bb1546476f2cdf1b43fdc3d9a0"} Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.669355 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:24:13 crc kubenswrapper[4779]: 
I0929 19:24:13.676190 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"8de5fc49-d446-41aa-aa5e-d32fd04a281e","Type":"ContainerStarted","Data":"3120508c8f01250599619e712411f50bdbd07662515f40ca32a632b92cdec85e"} Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.676349 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.678033 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerStarted","Data":"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43"} Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.731203 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.137254882 podStartE2EDuration="18.73118346s" podCreationTimestamp="2025-09-29 19:23:55 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.44553 +0000 UTC m=+956.329955110" lastFinishedPulling="2025-09-29 19:24:12.039458558 +0000 UTC m=+962.923883688" observedRunningTime="2025-09-29 19:24:13.723375787 +0000 UTC m=+964.607800887" watchObservedRunningTime="2025-09-29 19:24:13.73118346 +0000 UTC m=+964.615608560" Sep 29 19:24:13 crc kubenswrapper[4779]: I0929 19:24:13.776691 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" podStartSLOduration=23.776665739 podStartE2EDuration="23.776665739s" podCreationTimestamp="2025-09-29 19:23:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:24:13.769410551 +0000 UTC m=+964.653835671" watchObservedRunningTime="2025-09-29 19:24:13.776665739 +0000 UTC m=+964.661090859" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.054375 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ds8vm"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.055487 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.057803 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.076320 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ds8vm"] Sep 29 19:24:14 crc kubenswrapper[4779]: E0929 19:24:14.135852 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cebec61_5d6f_4bd2_a9e7_61f0b0f74751.slice/crio-7591e8ea3efb0efa414a17639ae7b40a365f18ef6bb0d7ef1fa110158f198990.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145108 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ff3819-9cf2-492f-a447-94a898e0a54d-config\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145149 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145181 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovn-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145214 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-combined-ca-bundle\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145279 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovs-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.145324 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlf6z\" (UniqueName: \"kubernetes.io/projected/e1ff3819-9cf2-492f-a447-94a898e0a54d-kube-api-access-nlf6z\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.195801 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.233867 4779 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-7fd796d7df-7pg92"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.235869 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.244723 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7pg92"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247508 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247525 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ff3819-9cf2-492f-a447-94a898e0a54d-config\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247552 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247574 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovn-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247600 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-combined-ca-bundle\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247646 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovs-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.247675 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlf6z\" (UniqueName: \"kubernetes.io/projected/e1ff3819-9cf2-492f-a447-94a898e0a54d-kube-api-access-nlf6z\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.248136 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovn-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.248825 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ff3819-9cf2-492f-a447-94a898e0a54d-config\") pod \"ovn-controller-metrics-ds8vm\" (UID: 
\"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.250293 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e1ff3819-9cf2-492f-a447-94a898e0a54d-ovs-rundir\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.275132 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.276794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlf6z\" (UniqueName: \"kubernetes.io/projected/e1ff3819-9cf2-492f-a447-94a898e0a54d-kube-api-access-nlf6z\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.282540 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ff3819-9cf2-492f-a447-94a898e0a54d-combined-ca-bundle\") pod \"ovn-controller-metrics-ds8vm\" (UID: \"e1ff3819-9cf2-492f-a447-94a898e0a54d\") " pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.348854 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bx7n\" (UniqueName: \"kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.348938 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.349041 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.349062 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.351684 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7pg92"] Sep 29 19:24:14 crc kubenswrapper[4779]: E0929 19:24:14.352235 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted 
volumes=[config dns-svc kube-api-access-7bx7n ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" podUID="ef5982c8-0c16-4468-8256-63c4b71961a1" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.378580 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ds8vm" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.393639 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.396181 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.398851 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.408504 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.449698 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.449735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.449766 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bx7n\" (UniqueName: \"kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.449789 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.450756 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.451254 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.451932 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.468267 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bx7n\" (UniqueName: \"kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n\") pod \"dnsmasq-dns-7fd796d7df-7pg92\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.551289 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.551527 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.551812 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.551875 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.551954 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwddk\" (UniqueName: \"kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.655134 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.655257 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.655278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.655302 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwddk\" (UniqueName: \"kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.655337 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.656162 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.656820 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.656838 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.657364 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.673126 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwddk\" (UniqueName: \"kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk\") pod \"dnsmasq-dns-86db49b7ff-85qrs\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") " pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.693864 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31200251-0f84-4946-88fb-276aa79589d9","Type":"ContainerStarted","Data":"fb89b502ce9cf0e205f2a977c78f1c39af574a6dcc9c2232d717aa86270470e2"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.696878 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bbcdb00b-849b-40c6-9e53-2c751dbd11dd","Type":"ContainerStarted","Data":"3f0dba465722d4b6d0223e04f50428594dd223694017106d5b8537ea70d27e20"} Sep 29 19:24:14 crc kubenswrapper[4779]: 
I0929 19:24:14.699274 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rwqzd" event={"ID":"9771c712-00ce-4dcf-ab04-7b6893c8725c","Type":"ContainerStarted","Data":"8d29719cc6900ef5efd3b38fc88316c5434706ad4cc2b06da1b2b44269ca3280"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.699360 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.700719 4779 generic.go:334] "Generic (PLEG): container finished" podID="8cebec61-5d6f-4bd2-a9e7-61f0b0f74751" containerID="7591e8ea3efb0efa414a17639ae7b40a365f18ef6bb0d7ef1fa110158f198990" exitCode=0 Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.700769 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-lx5dn" event={"ID":"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751","Type":"ContainerDied","Data":"7591e8ea3efb0efa414a17639ae7b40a365f18ef6bb0d7ef1fa110158f198990"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.702573 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c","Type":"ContainerStarted","Data":"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.702704 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.703590 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerStarted","Data":"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.705896 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79136f36-d427-488e-81e2-ef55c73ee91a","Type":"ContainerStarted","Data":"689b7664f74c8bbdf919b5a5f4eeb9931f4968acdc8fd36ab47c88246f325c97"} Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.705913 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.727056 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.737045 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.793608769 podStartE2EDuration="17.737029015s" podCreationTimestamp="2025-09-29 19:23:57 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.404691347 +0000 UTC m=+956.289116457" lastFinishedPulling="2025-09-29 19:24:13.348111613 +0000 UTC m=+964.232536703" observedRunningTime="2025-09-29 19:24:14.733349225 +0000 UTC m=+965.617774325" watchObservedRunningTime="2025-09-29 19:24:14.737029015 +0000 UTC m=+965.621454115" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.752987 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.786994 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-rwqzd" podStartSLOduration=5.060660625 podStartE2EDuration="12.786980106s" podCreationTimestamp="2025-09-29 19:24:02 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.542044479 +0000 UTC m=+956.426469579" lastFinishedPulling="2025-09-29 19:24:13.26836395 +0000 UTC m=+964.152789060" observedRunningTime="2025-09-29 19:24:14.781489007 +0000 UTC m=+965.665914107" watchObservedRunningTime="2025-09-29 19:24:14.786980106 +0000 UTC m=+965.671405206" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.857732 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config\") pod \"ef5982c8-0c16-4468-8256-63c4b71961a1\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.858163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc\") pod \"ef5982c8-0c16-4468-8256-63c4b71961a1\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.858684 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bx7n\" (UniqueName: \"kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n\") pod \"ef5982c8-0c16-4468-8256-63c4b71961a1\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.858864 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb\") pod \"ef5982c8-0c16-4468-8256-63c4b71961a1\" (UID: \"ef5982c8-0c16-4468-8256-63c4b71961a1\") " Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.861701 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config" (OuterVolumeSpecName: "config") pod "ef5982c8-0c16-4468-8256-63c4b71961a1" (UID: "ef5982c8-0c16-4468-8256-63c4b71961a1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.862081 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ef5982c8-0c16-4468-8256-63c4b71961a1" (UID: "ef5982c8-0c16-4468-8256-63c4b71961a1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.864123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ef5982c8-0c16-4468-8256-63c4b71961a1" (UID: "ef5982c8-0c16-4468-8256-63c4b71961a1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.865521 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n" (OuterVolumeSpecName: "kube-api-access-7bx7n") pod "ef5982c8-0c16-4468-8256-63c4b71961a1" (UID: "ef5982c8-0c16-4468-8256-63c4b71961a1"). InnerVolumeSpecName "kube-api-access-7bx7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.900619 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ds8vm"] Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.961285 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.961329 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.961340 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bx7n\" (UniqueName: \"kubernetes.io/projected/ef5982c8-0c16-4468-8256-63c4b71961a1-kube-api-access-7bx7n\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:14 crc kubenswrapper[4779]: I0929 19:24:14.961350 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef5982c8-0c16-4468-8256-63c4b71961a1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.294198 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"] Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.715185 4779 generic.go:334] "Generic (PLEG): container finished" podID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerID="f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9" exitCode=0 Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.715282 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" event={"ID":"c23ebfd4-1d4f-45fc-9c53-888ce644ad45","Type":"ContainerDied","Data":"f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9"} Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.715506 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" event={"ID":"c23ebfd4-1d4f-45fc-9c53-888ce644ad45","Type":"ContainerStarted","Data":"2aee646e9b14e2fdc6cd9b8a5e13d76065b925751d2a87e907c8c4342484e505"} Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.718564 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-lx5dn" event={"ID":"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751","Type":"ContainerStarted","Data":"c569c520c4e1d5226830cab5fad3181b7949f50ab208e91cbcb49df821016983"} Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.718609 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-lx5dn" event={"ID":"8cebec61-5d6f-4bd2-a9e7-61f0b0f74751","Type":"ContainerStarted","Data":"e74664d7aec056f15fd6740fb3bf9789b0cf98eaba88765220f8355c7ee2f412"} Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.718692 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.718721 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.722707 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ds8vm" event={"ID":"e1ff3819-9cf2-492f-a447-94a898e0a54d","Type":"ContainerStarted","Data":"596570e410ba1e2f07145595729d964f7300da74d24f4c5d5534dc0091f63aa4"} Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.723175 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="dnsmasq-dns" containerID="cri-o://49b769762112f2c598db9b5213e030193bfdd8bb1546476f2cdf1b43fdc3d9a0" gracePeriod=10 Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.723206 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7pg92" Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.760507 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-lx5dn" podStartSLOduration=6.656377571 podStartE2EDuration="13.760490939s" podCreationTimestamp="2025-09-29 19:24:02 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.670380226 +0000 UTC m=+956.554805326" lastFinishedPulling="2025-09-29 19:24:12.774493594 +0000 UTC m=+963.658918694" observedRunningTime="2025-09-29 19:24:15.757599371 +0000 UTC m=+966.642024481" watchObservedRunningTime="2025-09-29 19:24:15.760490939 +0000 UTC m=+966.644916029" Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.810846 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7pg92"] Sep 29 19:24:15 crc kubenswrapper[4779]: I0929 19:24:15.815791 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7pg92"] Sep 29 19:24:16 crc kubenswrapper[4779]: I0929 19:24:16.732409 4779 generic.go:334] "Generic (PLEG): container finished" podID="92364a65-9a43-4f86-8798-daf475ca26e6" containerID="49b769762112f2c598db9b5213e030193bfdd8bb1546476f2cdf1b43fdc3d9a0" exitCode=0 Sep 29 19:24:16 crc kubenswrapper[4779]: I0929 19:24:16.732512 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" event={"ID":"92364a65-9a43-4f86-8798-daf475ca26e6","Type":"ContainerDied","Data":"49b769762112f2c598db9b5213e030193bfdd8bb1546476f2cdf1b43fdc3d9a0"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.470077 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.601103 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc\") pod \"92364a65-9a43-4f86-8798-daf475ca26e6\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.601280 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hmft\" (UniqueName: \"kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft\") pod \"92364a65-9a43-4f86-8798-daf475ca26e6\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.601791 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config\") pod \"92364a65-9a43-4f86-8798-daf475ca26e6\" (UID: \"92364a65-9a43-4f86-8798-daf475ca26e6\") " Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.607131 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft" (OuterVolumeSpecName: "kube-api-access-2hmft") pod "92364a65-9a43-4f86-8798-daf475ca26e6" (UID: "92364a65-9a43-4f86-8798-daf475ca26e6"). InnerVolumeSpecName "kube-api-access-2hmft". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.649631 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "92364a65-9a43-4f86-8798-daf475ca26e6" (UID: "92364a65-9a43-4f86-8798-daf475ca26e6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.652242 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config" (OuterVolumeSpecName: "config") pod "92364a65-9a43-4f86-8798-daf475ca26e6" (UID: "92364a65-9a43-4f86-8798-daf475ca26e6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.704051 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.704085 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hmft\" (UniqueName: \"kubernetes.io/projected/92364a65-9a43-4f86-8798-daf475ca26e6-kube-api-access-2hmft\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.704098 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92364a65-9a43-4f86-8798-daf475ca26e6-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.741524 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" event={"ID":"92364a65-9a43-4f86-8798-daf475ca26e6","Type":"ContainerDied","Data":"9a44d0478a077dd18afd4b9ef04732b1d4a01bcac177419bbec663cb3eba95e3"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.741549 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wn9td" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.741592 4779 scope.go:117] "RemoveContainer" containerID="49b769762112f2c598db9b5213e030193bfdd8bb1546476f2cdf1b43fdc3d9a0" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.746624 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ds8vm" event={"ID":"e1ff3819-9cf2-492f-a447-94a898e0a54d","Type":"ContainerStarted","Data":"3bfa5766a30185409070030f3207ca2d5bd0b36df173e043aa54b796c788eb08"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.749914 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79136f36-d427-488e-81e2-ef55c73ee91a","Type":"ContainerStarted","Data":"4509f757fb8a15e14fb7636f635b97eb0e0ff65b34a8fac48ba7059e99c65e4e"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.752879 4779 generic.go:334] "Generic (PLEG): container finished" podID="31200251-0f84-4946-88fb-276aa79589d9" containerID="fb89b502ce9cf0e205f2a977c78f1c39af574a6dcc9c2232d717aa86270470e2" exitCode=0 Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.752940 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31200251-0f84-4946-88fb-276aa79589d9","Type":"ContainerDied","Data":"fb89b502ce9cf0e205f2a977c78f1c39af574a6dcc9c2232d717aa86270470e2"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.755625 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bbcdb00b-849b-40c6-9e53-2c751dbd11dd","Type":"ContainerStarted","Data":"b89a4e960f3e0ce938fdac42445db02098783df71cb37ad55619719da0444a0e"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.757586 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" event={"ID":"c23ebfd4-1d4f-45fc-9c53-888ce644ad45","Type":"ContainerStarted","Data":"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.759225 4779 generic.go:334] "Generic (PLEG): container finished" podID="f5cf224b-f179-4c3c-bc69-5f3d448aca1d" 
containerID="0fb980298cd73136188a0518c2f5df69e9dd3d7c353d543ee36dac2bd6dfbf46" exitCode=0 Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.759269 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5cf224b-f179-4c3c-bc69-5f3d448aca1d","Type":"ContainerDied","Data":"0fb980298cd73136188a0518c2f5df69e9dd3d7c353d543ee36dac2bd6dfbf46"} Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.774888 4779 scope.go:117] "RemoveContainer" containerID="ea3c071d5a29a22ec6c4c8677e08f1ce3b70883db3e673f8ab1a89d6e80d6d2f" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.779425 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ds8vm" podStartSLOduration=1.529574569 podStartE2EDuration="3.779394507s" podCreationTimestamp="2025-09-29 19:24:14 +0000 UTC" firstStartedPulling="2025-09-29 19:24:14.970015263 +0000 UTC m=+965.854440363" lastFinishedPulling="2025-09-29 19:24:17.219835201 +0000 UTC m=+968.104260301" observedRunningTime="2025-09-29 19:24:17.765398836 +0000 UTC m=+968.649823956" watchObservedRunningTime="2025-09-29 19:24:17.779394507 +0000 UTC m=+968.663819647" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.783785 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef5982c8-0c16-4468-8256-63c4b71961a1" path="/var/lib/kubelet/pods/ef5982c8-0c16-4468-8256-63c4b71961a1/volumes" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.807130 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=6.162758732 podStartE2EDuration="17.807107772s" podCreationTimestamp="2025-09-29 19:24:00 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.584052204 +0000 UTC m=+956.468477294" lastFinishedPulling="2025-09-29 19:24:17.228401234 +0000 UTC m=+968.112826334" observedRunningTime="2025-09-29 19:24:17.802365963 +0000 UTC m=+968.686791093" watchObservedRunningTime="2025-09-29 19:24:17.807107772 +0000 UTC m=+968.691532882" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.817095 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" podStartSLOduration=3.817077184 podStartE2EDuration="3.817077184s" podCreationTimestamp="2025-09-29 19:24:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:24:17.816472927 +0000 UTC m=+968.700898037" watchObservedRunningTime="2025-09-29 19:24:17.817077184 +0000 UTC m=+968.701502294" Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.841068 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.846353 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wn9td"] Sep 29 19:24:17 crc kubenswrapper[4779]: I0929 19:24:17.906004 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.031372938 podStartE2EDuration="14.905983616s" podCreationTimestamp="2025-09-29 19:24:03 +0000 UTC" firstStartedPulling="2025-09-29 19:24:06.338797118 +0000 UTC m=+957.223222218" lastFinishedPulling="2025-09-29 19:24:17.213407796 +0000 UTC m=+968.097832896" observedRunningTime="2025-09-29 19:24:17.903782406 +0000 UTC m=+968.788207506" watchObservedRunningTime="2025-09-29 19:24:17.905983616 +0000 UTC 
m=+968.790408716" Sep 29 19:24:18 crc kubenswrapper[4779]: I0929 19:24:18.768044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31200251-0f84-4946-88fb-276aa79589d9","Type":"ContainerStarted","Data":"50318a92cb08bd41faa4e02575d912d3bdb6dd95579b0385939bb17239c60fa2"} Sep 29 19:24:18 crc kubenswrapper[4779]: I0929 19:24:18.770637 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"f5cf224b-f179-4c3c-bc69-5f3d448aca1d","Type":"ContainerStarted","Data":"f792358a26a47452ecdddfc9a883f8d2bfde995a4dbf38232f8e67118c0f8a4e"} Sep 29 19:24:18 crc kubenswrapper[4779]: I0929 19:24:18.772875 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:18 crc kubenswrapper[4779]: I0929 19:24:18.808261 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=18.028463611 podStartE2EDuration="25.808243459s" podCreationTimestamp="2025-09-29 19:23:53 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.419645794 +0000 UTC m=+956.304070894" lastFinishedPulling="2025-09-29 19:24:13.199425642 +0000 UTC m=+964.083850742" observedRunningTime="2025-09-29 19:24:18.804214489 +0000 UTC m=+969.688639649" watchObservedRunningTime="2025-09-29 19:24:18.808243459 +0000 UTC m=+969.692668569" Sep 29 19:24:18 crc kubenswrapper[4779]: I0929 19:24:18.832413 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=19.624653675 podStartE2EDuration="26.832392667s" podCreationTimestamp="2025-09-29 19:23:52 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.567342478 +0000 UTC m=+956.451767578" lastFinishedPulling="2025-09-29 19:24:12.77508147 +0000 UTC m=+963.659506570" observedRunningTime="2025-09-29 19:24:18.829389375 +0000 UTC m=+969.713814475" watchObservedRunningTime="2025-09-29 19:24:18.832392667 +0000 UTC m=+969.716817777" Sep 29 19:24:19 crc kubenswrapper[4779]: I0929 19:24:19.783156 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" path="/var/lib/kubelet/pods/92364a65-9a43-4f86-8798-daf475ca26e6/volumes" Sep 29 19:24:19 crc kubenswrapper[4779]: I0929 19:24:19.931947 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:19 crc kubenswrapper[4779]: I0929 19:24:19.932340 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:19 crc kubenswrapper[4779]: I0929 19:24:19.971963 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.214435 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.276288 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.578082 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.789352 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.841167 4779 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 19:24:20 crc kubenswrapper[4779]: I0929 19:24:20.960305 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.201431 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:24:21 crc kubenswrapper[4779]: E0929 19:24:21.201806 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="init" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.201829 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="init" Sep 29 19:24:21 crc kubenswrapper[4779]: E0929 19:24:21.201860 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="dnsmasq-dns" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.201872 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="dnsmasq-dns" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.202076 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="92364a65-9a43-4f86-8798-daf475ca26e6" containerName="dnsmasq-dns" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.203148 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.209790 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.210058 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.210225 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.211160 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-t9bxx" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.220558 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.274985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.275057 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hj6m\" (UniqueName: \"kubernetes.io/projected/210875cc-31fa-480b-bd01-a042fc73dcd6-kube-api-access-2hj6m\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.275078 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-scripts\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: 
I0929 19:24:21.275108 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.275267 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-config\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.275422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.275574 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.376883 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hj6m\" (UniqueName: \"kubernetes.io/projected/210875cc-31fa-480b-bd01-a042fc73dcd6-kube-api-access-2hj6m\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.376922 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-scripts\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.376950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.376977 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-config\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.377006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.377039 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") 
" pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.377073 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.378606 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.378706 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-config\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.378941 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/210875cc-31fa-480b-bd01-a042fc73dcd6-scripts\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.382719 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.382910 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.395834 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210875cc-31fa-480b-bd01-a042fc73dcd6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.402789 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hj6m\" (UniqueName: \"kubernetes.io/projected/210875cc-31fa-480b-bd01-a042fc73dcd6-kube-api-access-2hj6m\") pod \"ovn-northd-0\" (UID: \"210875cc-31fa-480b-bd01-a042fc73dcd6\") " pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.525176 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 19:24:21 crc kubenswrapper[4779]: W0929 19:24:21.775669 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod210875cc_31fa_480b_bd01_a042fc73dcd6.slice/crio-31a5f9e7abd7451ed0c9480a6422bb7491e3b837ae642e26d853ea837ede62e9 WatchSource:0}: Error finding container 31a5f9e7abd7451ed0c9480a6422bb7491e3b837ae642e26d853ea837ede62e9: Status 404 returned error can't find the container with id 31a5f9e7abd7451ed0c9480a6422bb7491e3b837ae642e26d853ea837ede62e9 Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.785315 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 19:24:21 crc kubenswrapper[4779]: I0929 19:24:21.797189 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"210875cc-31fa-480b-bd01-a042fc73dcd6","Type":"ContainerStarted","Data":"31a5f9e7abd7451ed0c9480a6422bb7491e3b837ae642e26d853ea837ede62e9"} Sep 29 19:24:23 crc kubenswrapper[4779]: I0929 19:24:23.815893 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"210875cc-31fa-480b-bd01-a042fc73dcd6","Type":"ContainerStarted","Data":"3be7c9bec6fd1cdace1be71d7af9999a38624df570ad079b52aca6c473c83aa2"} Sep 29 19:24:23 crc kubenswrapper[4779]: I0929 19:24:23.816226 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 19:24:23 crc kubenswrapper[4779]: I0929 19:24:23.816247 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"210875cc-31fa-480b-bd01-a042fc73dcd6","Type":"ContainerStarted","Data":"0b822f7b9fdae730cddb4c99d39e436c398dacc7ff2945fd7c14b23e2720ebff"} Sep 29 19:24:23 crc kubenswrapper[4779]: I0929 19:24:23.849138 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.916235859 podStartE2EDuration="2.849110119s" podCreationTimestamp="2025-09-29 19:24:21 +0000 UTC" firstStartedPulling="2025-09-29 19:24:21.777743129 +0000 UTC m=+972.662168229" lastFinishedPulling="2025-09-29 19:24:22.710617389 +0000 UTC m=+973.595042489" observedRunningTime="2025-09-29 19:24:23.838004457 +0000 UTC m=+974.722429557" watchObservedRunningTime="2025-09-29 19:24:23.849110119 +0000 UTC m=+974.733535259" Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.180172 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.180385 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.267628 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.755679 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.815210 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.815572 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="dnsmasq-dns" 
containerID="cri-o://867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6" gracePeriod=10 Sep 29 19:24:24 crc kubenswrapper[4779]: I0929 19:24:24.953215 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.070896 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.070951 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.117909 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.341451 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-kzm6s"] Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.342860 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.351780 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kzm6s"] Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.356973 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.461854 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config\") pod \"7687e8ef-621c-432c-8153-dbc11707461f\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.462122 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzx45\" (UniqueName: \"kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45\") pod \"7687e8ef-621c-432c-8153-dbc11707461f\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.462305 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc\") pod \"7687e8ef-621c-432c-8153-dbc11707461f\" (UID: \"7687e8ef-621c-432c-8153-dbc11707461f\") " Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.462771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcm2t\" (UniqueName: \"kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t\") pod \"keystone-db-create-kzm6s\" (UID: \"9a729519-3dac-4480-a83a-99f5fb79a284\") " pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.469107 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45" (OuterVolumeSpecName: "kube-api-access-wzx45") pod "7687e8ef-621c-432c-8153-dbc11707461f" (UID: "7687e8ef-621c-432c-8153-dbc11707461f"). InnerVolumeSpecName "kube-api-access-wzx45". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.482573 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-4q8h8"] Sep 29 19:24:25 crc kubenswrapper[4779]: E0929 19:24:25.483160 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="dnsmasq-dns" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.483247 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="dnsmasq-dns" Sep 29 19:24:25 crc kubenswrapper[4779]: E0929 19:24:25.483355 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="init" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.483429 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="init" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.483681 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7687e8ef-621c-432c-8153-dbc11707461f" containerName="dnsmasq-dns" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.485472 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.490990 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4q8h8"] Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.505810 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7687e8ef-621c-432c-8153-dbc11707461f" (UID: "7687e8ef-621c-432c-8153-dbc11707461f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.525757 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config" (OuterVolumeSpecName: "config") pod "7687e8ef-621c-432c-8153-dbc11707461f" (UID: "7687e8ef-621c-432c-8153-dbc11707461f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.564206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcm2t\" (UniqueName: \"kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t\") pod \"keystone-db-create-kzm6s\" (UID: \"9a729519-3dac-4480-a83a-99f5fb79a284\") " pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.564339 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzx45\" (UniqueName: \"kubernetes.io/projected/7687e8ef-621c-432c-8153-dbc11707461f-kube-api-access-wzx45\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.564365 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.564377 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7687e8ef-621c-432c-8153-dbc11707461f-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.578939 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcm2t\" (UniqueName: \"kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t\") pod \"keystone-db-create-kzm6s\" (UID: \"9a729519-3dac-4480-a83a-99f5fb79a284\") " pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.665621 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.666177 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45vtq\" (UniqueName: \"kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq\") pod \"placement-db-create-4q8h8\" (UID: \"d8365cf2-c10d-40a2-9bfb-1a386175d137\") " pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.767798 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45vtq\" (UniqueName: \"kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq\") pod \"placement-db-create-4q8h8\" (UID: \"d8365cf2-c10d-40a2-9bfb-1a386175d137\") " pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.792876 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45vtq\" (UniqueName: \"kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq\") pod \"placement-db-create-4q8h8\" (UID: \"d8365cf2-c10d-40a2-9bfb-1a386175d137\") " pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.832439 4779 generic.go:334] "Generic (PLEG): container finished" podID="7687e8ef-621c-432c-8153-dbc11707461f" containerID="867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6" exitCode=0 Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.832521 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.832596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" event={"ID":"7687e8ef-621c-432c-8153-dbc11707461f","Type":"ContainerDied","Data":"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6"} Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.833158 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-79mzc" event={"ID":"7687e8ef-621c-432c-8153-dbc11707461f","Type":"ContainerDied","Data":"895bb25f895aa3004f41f82cfd05c8e848078250c34fd757828fec8cbd3d98cc"} Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.833279 4779 scope.go:117] "RemoveContainer" containerID="867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.863541 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.863755 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.872145 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-79mzc"] Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.875040 4779 scope.go:117] "RemoveContainer" containerID="47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.883604 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.900061 4779 scope.go:117] "RemoveContainer" containerID="867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6" Sep 29 19:24:25 crc kubenswrapper[4779]: E0929 19:24:25.900581 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6\": container with ID starting with 867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6 not found: ID does not exist" containerID="867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.900631 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6"} err="failed to get container status \"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6\": rpc error: code = NotFound desc = could not find container \"867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6\": container with ID starting with 867893830a26ed3fa1e4aea27964795ac0f46b802ececc8fa49e0e25fb503db6 not found: ID does not exist" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.900661 4779 scope.go:117] "RemoveContainer" containerID="47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4" Sep 29 19:24:25 crc kubenswrapper[4779]: E0929 19:24:25.901709 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4\": container with ID starting with 47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4 not found: ID does not exist" 
containerID="47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4" Sep 29 19:24:25 crc kubenswrapper[4779]: I0929 19:24:25.901741 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4"} err="failed to get container status \"47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4\": rpc error: code = NotFound desc = could not find container \"47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4\": container with ID starting with 47df3acac219cdb1032810a53e07c21f4f32af5329d13598d1f8fc519838ecf4 not found: ID does not exist" Sep 29 19:24:26 crc kubenswrapper[4779]: I0929 19:24:26.131969 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kzm6s"] Sep 29 19:24:26 crc kubenswrapper[4779]: W0929 19:24:26.139294 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a729519_3dac_4480_a83a_99f5fb79a284.slice/crio-fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f WatchSource:0}: Error finding container fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f: Status 404 returned error can't find the container with id fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f Sep 29 19:24:26 crc kubenswrapper[4779]: I0929 19:24:26.323879 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4q8h8"] Sep 29 19:24:26 crc kubenswrapper[4779]: W0929 19:24:26.329129 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8365cf2_c10d_40a2_9bfb_1a386175d137.slice/crio-ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21 WatchSource:0}: Error finding container ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21: Status 404 returned error can't find the container with id ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21 Sep 29 19:24:26 crc kubenswrapper[4779]: I0929 19:24:26.841847 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kzm6s" event={"ID":"9a729519-3dac-4480-a83a-99f5fb79a284","Type":"ContainerStarted","Data":"fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f"} Sep 29 19:24:26 crc kubenswrapper[4779]: I0929 19:24:26.844222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4q8h8" event={"ID":"d8365cf2-c10d-40a2-9bfb-1a386175d137","Type":"ContainerStarted","Data":"ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21"} Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.620012 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.700562 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.707448 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.716533 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.778370 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7687e8ef-621c-432c-8153-dbc11707461f" path="/var/lib/kubelet/pods/7687e8ef-621c-432c-8153-dbc11707461f/volumes" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.841574 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvshv\" (UniqueName: \"kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.841653 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.841678 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.841769 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.841876 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.853822 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kzm6s" event={"ID":"9a729519-3dac-4480-a83a-99f5fb79a284","Type":"ContainerStarted","Data":"b3aa197877ec7e530c30943503880e07fa9de4bc59f8b25788504d3932934546"} Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.943100 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.943187 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvshv\" (UniqueName: \"kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: 
\"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.943215 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.943241 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.943294 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.944118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.944168 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.944190 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.944652 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:27 crc kubenswrapper[4779]: I0929 19:24:27.959487 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvshv\" (UniqueName: \"kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv\") pod \"dnsmasq-dns-698758b865-lww4s\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.027104 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.485458 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:24:28 crc kubenswrapper[4779]: W0929 19:24:28.498876 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b7daa08_8885_418b_bff6_582ebb727b73.slice/crio-79cb62dd210f71b2ed7fa939eb7fc7cfabbb48110ccd17223c1f01aea5105fc5 WatchSource:0}: Error finding container 79cb62dd210f71b2ed7fa939eb7fc7cfabbb48110ccd17223c1f01aea5105fc5: Status 404 returned error can't find the container with id 79cb62dd210f71b2ed7fa939eb7fc7cfabbb48110ccd17223c1f01aea5105fc5 Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.862428 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lww4s" event={"ID":"7b7daa08-8885-418b-bff6-582ebb727b73","Type":"ContainerStarted","Data":"79cb62dd210f71b2ed7fa939eb7fc7cfabbb48110ccd17223c1f01aea5105fc5"} Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.919990 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.927130 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.929825 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.929885 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.929920 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.933736 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-r4gf7" Sep 29 19:24:28 crc kubenswrapper[4779]: I0929 19:24:28.952217 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.060092 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.060176 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-lock\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.060217 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.060253 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-cache\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.060379 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs7rr\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-kube-api-access-qs7rr\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.161658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-cache\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.162131 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs7rr\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-kube-api-access-qs7rr\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.162583 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-cache\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.163144 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.163482 4779 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.163761 4779 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.163948 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift podName:a214376c-8f64-4f89-9354-14de32e2f17f nodeName:}" failed. No retries permitted until 2025-09-29 19:24:29.663919781 +0000 UTC m=+980.548344921 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift") pod "swift-storage-0" (UID: "a214376c-8f64-4f89-9354-14de32e2f17f") : configmap "swift-ring-files" not found
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.164206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-lock\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.164526 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.164763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a214376c-8f64-4f89-9354-14de32e2f17f-lock\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.165065 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.192435 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs7rr\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-kube-api-access-qs7rr\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.206513 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: I0929 19:24:29.674296 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.674531 4779 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.674658 4779 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 19:24:29 crc kubenswrapper[4779]: E0929 19:24:29.674705 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift podName:a214376c-8f64-4f89-9354-14de32e2f17f nodeName:}" failed. No retries permitted until 2025-09-29 19:24:30.674690394 +0000 UTC m=+981.559115494 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift") pod "swift-storage-0" (UID: "a214376c-8f64-4f89-9354-14de32e2f17f") : configmap "swift-ring-files" not found
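The retry delays in the nestedpendingoperations.go entries above double on each failure: 500ms on the first retry, then 1s here, and 2s, 4s and 8s further down, which is the exponential backoff kubelet applies to failed volume operations. A minimal sketch of that schedule, assuming the 500ms base seen in this log and an illustrative cap (the constants and names are assumptions, not kubelet's actual implementation):

package main

import (
	"fmt"
	"time"
)

// backoffDelay returns the delay before retry n (0-based) of a failed
// volume operation: a base delay that doubles on every failed attempt,
// clamped to maxDelay. The 500ms base matches the first retry above;
// the cap is an assumed illustrative value.
func backoffDelay(n int, base, maxDelay time.Duration) time.Duration {
	d := base
	for i := 0; i < n; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	// Prints 500ms 1s 2s 4s 8s, the durationBeforeRetry sequence that
	// the etc-swift mount goes through in this log.
	for n := 0; n < 5; n++ {
		fmt.Print(backoffDelay(n, 500*time.Millisecond, 2*time.Minute), " ")
	}
	fmt.Println()
}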
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.692903 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:30 crc kubenswrapper[4779]: E0929 19:24:30.693163 4779 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 19:24:30 crc kubenswrapper[4779]: E0929 19:24:30.693195 4779 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 19:24:30 crc kubenswrapper[4779]: E0929 19:24:30.693280 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift podName:a214376c-8f64-4f89-9354-14de32e2f17f nodeName:}" failed. No retries permitted until 2025-09-29 19:24:32.693254328 +0000 UTC m=+983.577679458 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift") pod "swift-storage-0" (UID: "a214376c-8f64-4f89-9354-14de32e2f17f") : configmap "swift-ring-files" not found
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.782712 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-df4sn"]
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.784636 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-df4sn"
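Each retry fails at projected.go:288 for the same reason: the pod's etc-swift projected volume references ConfigMap openstack/swift-ring-files, which does not exist yet; it only appears once the swift-ring-rebalance job (created at 19:24:32 below) publishes the ring files. A hedged client-go sketch for checking that dependency by hand; the namespace and names come from the log, the program itself is illustrative:

package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from the local kubeconfig (path is an assumption).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// swift-storage-0's projected volume "etc-swift" references this
	// ConfigMap; until it exists, every MountVolume.SetUp attempt fails
	// exactly as logged above.
	_, err = cs.CoreV1().ConfigMaps("openstack").Get(context.TODO(), "swift-ring-files", metav1.GetOptions{})
	switch {
	case apierrors.IsNotFound(err):
		fmt.Println("swift-ring-files missing; etc-swift cannot mount yet")
	case err != nil:
		panic(err)
	default:
		fmt.Println("swift-ring-files present; the next retry should succeed")
	}
}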
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.801036 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-df4sn"]
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.882118 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4q8h8" event={"ID":"d8365cf2-c10d-40a2-9bfb-1a386175d137","Type":"ContainerStarted","Data":"6ec2e0e43972651fadf2f1cda510284be6b29c03dd9312f38660af2b18c13af9"}
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.896470 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56mzj\" (UniqueName: \"kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj\") pod \"glance-db-create-df4sn\" (UID: \"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7\") " pod="openstack/glance-db-create-df4sn"
Sep 29 19:24:30 crc kubenswrapper[4779]: I0929 19:24:30.998085 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56mzj\" (UniqueName: \"kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj\") pod \"glance-db-create-df4sn\" (UID: \"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7\") " pod="openstack/glance-db-create-df4sn"
Sep 29 19:24:31 crc kubenswrapper[4779]: I0929 19:24:31.035788 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56mzj\" (UniqueName: \"kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj\") pod \"glance-db-create-df4sn\" (UID: \"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7\") " pod="openstack/glance-db-create-df4sn"
Sep 29 19:24:31 crc kubenswrapper[4779]: I0929 19:24:31.148955 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-df4sn"
Sep 29 19:24:31 crc kubenswrapper[4779]: I0929 19:24:31.637665 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-df4sn"]
Sep 29 19:24:31 crc kubenswrapper[4779]: I0929 19:24:31.891350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-df4sn" event={"ID":"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7","Type":"ContainerStarted","Data":"7c5a69f68ad9d3334361f5d0365be0291250861e84a005d80c0766aff52d321b"}
Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.730655 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0"
Sep 29 19:24:32 crc kubenswrapper[4779]: E0929 19:24:32.730983 4779 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 19:24:32 crc kubenswrapper[4779]: E0929 19:24:32.731034 4779 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 19:24:32 crc kubenswrapper[4779]: E0929 19:24:32.731144 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift podName:a214376c-8f64-4f89-9354-14de32e2f17f nodeName:}" failed. No retries permitted until 2025-09-29 19:24:36.731109399 +0000 UTC m=+987.615534539 (durationBeforeRetry 4s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift") pod "swift-storage-0" (UID: "a214376c-8f64-4f89-9354-14de32e2f17f") : configmap "swift-ring-files" not found Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.753278 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-wzdw7"] Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.754804 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.764193 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.764532 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.765605 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.773000 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-wzdw7"] Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832683 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8xxc\" (UniqueName: \"kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832750 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832837 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832913 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832939 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts\") pod 
\"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.832961 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.899527 4779 generic.go:334] "Generic (PLEG): container finished" podID="d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" containerID="488253d205161a35bcda5d2176cd378696eb78fba9baf3e497370a51674ba108" exitCode=0 Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.899590 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-df4sn" event={"ID":"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7","Type":"ContainerDied","Data":"488253d205161a35bcda5d2176cd378696eb78fba9baf3e497370a51674ba108"} Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.901733 4779 generic.go:334] "Generic (PLEG): container finished" podID="7b7daa08-8885-418b-bff6-582ebb727b73" containerID="07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7" exitCode=0 Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.901835 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lww4s" event={"ID":"7b7daa08-8885-418b-bff6-582ebb727b73","Type":"ContainerDied","Data":"07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7"} Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.903290 4779 generic.go:334] "Generic (PLEG): container finished" podID="d8365cf2-c10d-40a2-9bfb-1a386175d137" containerID="6ec2e0e43972651fadf2f1cda510284be6b29c03dd9312f38660af2b18c13af9" exitCode=0 Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.903422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4q8h8" event={"ID":"d8365cf2-c10d-40a2-9bfb-1a386175d137","Type":"ContainerDied","Data":"6ec2e0e43972651fadf2f1cda510284be6b29c03dd9312f38660af2b18c13af9"} Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.905245 4779 generic.go:334] "Generic (PLEG): container finished" podID="9a729519-3dac-4480-a83a-99f5fb79a284" containerID="b3aa197877ec7e530c30943503880e07fa9de4bc59f8b25788504d3932934546" exitCode=0 Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.905296 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kzm6s" event={"ID":"9a729519-3dac-4480-a83a-99f5fb79a284","Type":"ContainerDied","Data":"b3aa197877ec7e530c30943503880e07fa9de4bc59f8b25788504d3932934546"} Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934299 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934396 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " 
pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934485 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934534 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934681 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8xxc\" (UniqueName: \"kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.934717 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.935141 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.943189 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.943202 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.943295 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.943448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.951745 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:32 crc kubenswrapper[4779]: I0929 19:24:32.959797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8xxc\" (UniqueName: \"kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc\") pod \"swift-ring-rebalance-wzdw7\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:33 crc kubenswrapper[4779]: I0929 19:24:33.069965 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:33 crc kubenswrapper[4779]: I0929 19:24:33.548615 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-wzdw7"] Sep 29 19:24:33 crc kubenswrapper[4779]: W0929 19:24:33.552633 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod587857be_cc5b_43cb_bf66_d9e7aadcc587.slice/crio-570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555 WatchSource:0}: Error finding container 570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555: Status 404 returned error can't find the container with id 570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555 Sep 29 19:24:33 crc kubenswrapper[4779]: I0929 19:24:33.919558 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wzdw7" event={"ID":"587857be-cc5b-43cb-bf66-d9e7aadcc587","Type":"ContainerStarted","Data":"570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555"} Sep 29 19:24:33 crc kubenswrapper[4779]: I0929 19:24:33.924431 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lww4s" event={"ID":"7b7daa08-8885-418b-bff6-582ebb727b73","Type":"ContainerStarted","Data":"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77"} Sep 29 19:24:33 crc kubenswrapper[4779]: I0929 19:24:33.962220 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-lww4s" podStartSLOduration=6.962186762 podStartE2EDuration="6.962186762s" podCreationTimestamp="2025-09-29 19:24:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:24:33.94846886 +0000 UTC m=+984.832894000" watchObservedRunningTime="2025-09-29 19:24:33.962186762 +0000 UTC m=+984.846611902" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.341532 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.464986 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcm2t\" (UniqueName: \"kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t\") pod \"9a729519-3dac-4480-a83a-99f5fb79a284\" (UID: \"9a729519-3dac-4480-a83a-99f5fb79a284\") " Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.470981 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t" (OuterVolumeSpecName: "kube-api-access-xcm2t") pod "9a729519-3dac-4480-a83a-99f5fb79a284" (UID: "9a729519-3dac-4480-a83a-99f5fb79a284"). InnerVolumeSpecName "kube-api-access-xcm2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.497855 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.504674 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-df4sn" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.565923 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45vtq\" (UniqueName: \"kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq\") pod \"d8365cf2-c10d-40a2-9bfb-1a386175d137\" (UID: \"d8365cf2-c10d-40a2-9bfb-1a386175d137\") " Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.566615 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcm2t\" (UniqueName: \"kubernetes.io/projected/9a729519-3dac-4480-a83a-99f5fb79a284-kube-api-access-xcm2t\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.570615 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq" (OuterVolumeSpecName: "kube-api-access-45vtq") pod "d8365cf2-c10d-40a2-9bfb-1a386175d137" (UID: "d8365cf2-c10d-40a2-9bfb-1a386175d137"). InnerVolumeSpecName "kube-api-access-45vtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.667547 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56mzj\" (UniqueName: \"kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj\") pod \"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7\" (UID: \"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7\") " Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.668120 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45vtq\" (UniqueName: \"kubernetes.io/projected/d8365cf2-c10d-40a2-9bfb-1a386175d137-kube-api-access-45vtq\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.675954 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj" (OuterVolumeSpecName: "kube-api-access-56mzj") pod "d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" (UID: "d04bbeb8-1f3e-47db-9c2e-a33c98f518b7"). InnerVolumeSpecName "kube-api-access-56mzj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.769767 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56mzj\" (UniqueName: \"kubernetes.io/projected/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7-kube-api-access-56mzj\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.932637 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kzm6s" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.932652 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kzm6s" event={"ID":"9a729519-3dac-4480-a83a-99f5fb79a284","Type":"ContainerDied","Data":"fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f"} Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.932698 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa3ebfd215fc507638fb04f26345f06c4da794b759c1a1cf8bf706b9a4bfb36f" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.934874 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-df4sn" event={"ID":"d04bbeb8-1f3e-47db-9c2e-a33c98f518b7","Type":"ContainerDied","Data":"7c5a69f68ad9d3334361f5d0365be0291250861e84a005d80c0766aff52d321b"} Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.934952 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c5a69f68ad9d3334361f5d0365be0291250861e84a005d80c0766aff52d321b" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.935011 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-df4sn" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.946810 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-4q8h8" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.947028 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4q8h8" event={"ID":"d8365cf2-c10d-40a2-9bfb-1a386175d137","Type":"ContainerDied","Data":"ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21"} Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.947097 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea4490718ea82c70a94d60b529853cc28bc07c078d536f5f3d74f59e9dba0d21" Sep 29 19:24:34 crc kubenswrapper[4779]: I0929 19:24:34.947136 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:24:36 crc kubenswrapper[4779]: I0929 19:24:36.607475 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 19:24:36 crc kubenswrapper[4779]: I0929 19:24:36.805241 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:36 crc kubenswrapper[4779]: E0929 19:24:36.805542 4779 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 19:24:36 crc kubenswrapper[4779]: E0929 19:24:36.805577 4779 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 19:24:36 crc kubenswrapper[4779]: E0929 19:24:36.805646 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift podName:a214376c-8f64-4f89-9354-14de32e2f17f nodeName:}" failed. No retries permitted until 2025-09-29 19:24:44.805626488 +0000 UTC m=+995.690051608 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift") pod "swift-storage-0" (UID: "a214376c-8f64-4f89-9354-14de32e2f17f") : configmap "swift-ring-files" not found
Sep 29 19:24:36 crc kubenswrapper[4779]: I0929 19:24:36.965596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wzdw7" event={"ID":"587857be-cc5b-43cb-bf66-d9e7aadcc587","Type":"ContainerStarted","Data":"864512b1b1159c5fc6402ee29d34cb0bac194dc788f5ec10590304ee6edc892b"}
Sep 29 19:24:36 crc kubenswrapper[4779]: I0929 19:24:36.996887 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-wzdw7" podStartSLOduration=2.013206427 podStartE2EDuration="4.996860708s" podCreationTimestamp="2025-09-29 19:24:32 +0000 UTC" firstStartedPulling="2025-09-29 19:24:33.554705873 +0000 UTC m=+984.439131003" lastFinishedPulling="2025-09-29 19:24:36.538360184 +0000 UTC m=+987.422785284" observedRunningTime="2025-09-29 19:24:36.989857958 +0000 UTC m=+987.874283108" watchObservedRunningTime="2025-09-29 19:24:36.996860708 +0000 UTC m=+987.881285848"
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.028540 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-lww4s"
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.116304 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"]
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.116618 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="dnsmasq-dns" containerID="cri-o://9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7" gracePeriod=10
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.541656 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs"
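The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (19:24:36.996860708 - 19:24:32 = 4.996860708s), and podStartSLOduration subtracts the image-pull window (lastFinishedPulling - firstStartedPulling). A small sketch of that arithmetic on the wall-clock timestamps from the entry (kubelet itself works from the monotonic m=+ offsets, so the last few digits can differ slightly):

package main

import (
	"fmt"
	"time"
)

// layout matches how the time.Time values are printed in the log entry above.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-09-29 19:24:32 +0000 UTC")
	firstPull := mustParse("2025-09-29 19:24:33.554705873 +0000 UTC")
	lastPull := mustParse("2025-09-29 19:24:36.538360184 +0000 UTC")
	watched := mustParse("2025-09-29 19:24:36.996860708 +0000 UTC")

	e2e := watched.Sub(created)        // 4.996860708s == podStartE2EDuration
	pulling := lastPull.Sub(firstPull) // ~2.98s spent pulling the image
	slo := e2e - pulling               // ~2.013s == podStartSLOduration
	fmt.Println(e2e, pulling, slo)
}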
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.651290 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc\") pod \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") "
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.651411 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb\") pod \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") "
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.651499 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwddk\" (UniqueName: \"kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk\") pod \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") "
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.651526 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config\") pod \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") "
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.651545 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb\") pod \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\" (UID: \"c23ebfd4-1d4f-45fc-9c53-888ce644ad45\") "
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.666464 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk" (OuterVolumeSpecName: "kube-api-access-nwddk") pod "c23ebfd4-1d4f-45fc-9c53-888ce644ad45" (UID: "c23ebfd4-1d4f-45fc-9c53-888ce644ad45"). InnerVolumeSpecName "kube-api-access-nwddk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.703176 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c23ebfd4-1d4f-45fc-9c53-888ce644ad45" (UID: "c23ebfd4-1d4f-45fc-9c53-888ce644ad45"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.704680 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config" (OuterVolumeSpecName: "config") pod "c23ebfd4-1d4f-45fc-9c53-888ce644ad45" (UID: "c23ebfd4-1d4f-45fc-9c53-888ce644ad45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.707073 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c23ebfd4-1d4f-45fc-9c53-888ce644ad45" (UID: "c23ebfd4-1d4f-45fc-9c53-888ce644ad45"). InnerVolumeSpecName "ovsdbserver-sb".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.707138 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c23ebfd4-1d4f-45fc-9c53-888ce644ad45" (UID: "c23ebfd4-1d4f-45fc-9c53-888ce644ad45"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.764168 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwddk\" (UniqueName: \"kubernetes.io/projected/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-kube-api-access-nwddk\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.764205 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.764214 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.764223 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.764366 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c23ebfd4-1d4f-45fc-9c53-888ce644ad45-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.987263 4779 generic.go:334] "Generic (PLEG): container finished" podID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerID="9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7" exitCode=0 Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.987353 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" event={"ID":"c23ebfd4-1d4f-45fc-9c53-888ce644ad45","Type":"ContainerDied","Data":"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"} Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.987400 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs"
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.987423 4779 scope.go:117] "RemoveContainer" containerID="9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"
Sep 29 19:24:38 crc kubenswrapper[4779]: I0929 19:24:38.987410 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-85qrs" event={"ID":"c23ebfd4-1d4f-45fc-9c53-888ce644ad45","Type":"ContainerDied","Data":"2aee646e9b14e2fdc6cd9b8a5e13d76065b925751d2a87e907c8c4342484e505"}
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.033565 4779 scope.go:117] "RemoveContainer" containerID="f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9"
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.038269 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"]
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.049174 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-85qrs"]
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.059064 4779 scope.go:117] "RemoveContainer" containerID="9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"
Sep 29 19:24:39 crc kubenswrapper[4779]: E0929 19:24:39.059650 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7\": container with ID starting with 9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7 not found: ID does not exist" containerID="9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.059705 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7"} err="failed to get container status \"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7\": rpc error: code = NotFound desc = could not find container \"9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7\": container with ID starting with 9b2ded76b6b27b6fc82327f10134fd0a6ab8197cb2869808c2baace2847964b7 not found: ID does not exist"
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.059749 4779 scope.go:117] "RemoveContainer" containerID="f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9"
Sep 29 19:24:39 crc kubenswrapper[4779]: E0929 19:24:39.066395 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9\": container with ID starting with f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9 not found: ID does not exist" containerID="f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9"
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.066466 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9"} err="failed to get container status \"f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9\": rpc error: code = NotFound desc = could not find container \"f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9\": container with ID starting with f48f5a5b701bdcbb19d779ff7561db79e60791a1a724e8601585c5e0b0bc81d9 not found: ID does not exist"
Sep 29 19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.784716 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" path="/var/lib/kubelet/pods/c23ebfd4-1d4f-45fc-9c53-888ce644ad45/volumes"
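The "DeleteContainer returned error" entries above are a benign race rather than a real failure: both containers were already gone by the time the deletor asked the runtime for their status, so CRI-O answered NotFound. The usual pattern for such cleanup paths is to treat NotFound as success so retries stay idempotent; a sketch of that pattern against a gRPC-style runtime error (the helper is illustrative, not kubelet's code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound from the runtime as success, so that
// deleting a container that was already removed with its sandbox (as in
// the log above) is a no-op instead of an error.
func removeContainer(id string, remove func(string) error) error {
	err := remove(id)
	if err != nil && status.Code(err) != codes.NotFound {
		return fmt.Errorf("remove container %s: %w", id, err)
	}
	return nil
}

func main() {
	// Simulated runtime that no longer knows the container, mirroring the
	// "could not find container" responses above.
	runtimeRemove := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer("9b2ded76b6b2", runtimeRemove)) // prints <nil>
}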
19:24:39 crc kubenswrapper[4779]: I0929 19:24:39.784716 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" path="/var/lib/kubelet/pods/c23ebfd4-1d4f-45fc-9c53-888ce644ad45/volumes" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.958235 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4816-account-create-td7m9"] Sep 29 19:24:40 crc kubenswrapper[4779]: E0929 19:24:40.958973 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959004 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: E0929 19:24:40.959033 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="dnsmasq-dns" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959053 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="dnsmasq-dns" Sep 29 19:24:40 crc kubenswrapper[4779]: E0929 19:24:40.959074 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8365cf2-c10d-40a2-9bfb-1a386175d137" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959093 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8365cf2-c10d-40a2-9bfb-1a386175d137" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: E0929 19:24:40.959137 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a729519-3dac-4480-a83a-99f5fb79a284" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959155 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a729519-3dac-4480-a83a-99f5fb79a284" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: E0929 19:24:40.959186 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="init" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959203 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="init" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959613 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23ebfd4-1d4f-45fc-9c53-888ce644ad45" containerName="dnsmasq-dns" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959667 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a729519-3dac-4480-a83a-99f5fb79a284" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959693 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.959733 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8365cf2-c10d-40a2-9bfb-1a386175d137" containerName="mariadb-database-create" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.961012 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.964695 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 19:24:40 crc kubenswrapper[4779]: I0929 19:24:40.966293 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4816-account-create-td7m9"] Sep 29 19:24:41 crc kubenswrapper[4779]: I0929 19:24:41.113879 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4m64\" (UniqueName: \"kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64\") pod \"glance-4816-account-create-td7m9\" (UID: \"ac185b21-9179-49ca-9034-24d9e28f3dd2\") " pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:41 crc kubenswrapper[4779]: I0929 19:24:41.215853 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4m64\" (UniqueName: \"kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64\") pod \"glance-4816-account-create-td7m9\" (UID: \"ac185b21-9179-49ca-9034-24d9e28f3dd2\") " pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:41 crc kubenswrapper[4779]: I0929 19:24:41.246631 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4m64\" (UniqueName: \"kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64\") pod \"glance-4816-account-create-td7m9\" (UID: \"ac185b21-9179-49ca-9034-24d9e28f3dd2\") " pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:41 crc kubenswrapper[4779]: I0929 19:24:41.324428 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:41 crc kubenswrapper[4779]: I0929 19:24:41.839835 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4816-account-create-td7m9"] Sep 29 19:24:41 crc kubenswrapper[4779]: W0929 19:24:41.848709 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac185b21_9179_49ca_9034_24d9e28f3dd2.slice/crio-49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76 WatchSource:0}: Error finding container 49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76: Status 404 returned error can't find the container with id 49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76 Sep 29 19:24:42 crc kubenswrapper[4779]: I0929 19:24:42.048280 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4816-account-create-td7m9" event={"ID":"ac185b21-9179-49ca-9034-24d9e28f3dd2","Type":"ContainerStarted","Data":"fc3bff118f4589e6778d2d9ec0a4c0ff2ecd71f1ac6b38bcbfc201f3f939cc45"} Sep 29 19:24:42 crc kubenswrapper[4779]: I0929 19:24:42.048734 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4816-account-create-td7m9" event={"ID":"ac185b21-9179-49ca-9034-24d9e28f3dd2","Type":"ContainerStarted","Data":"49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76"} Sep 29 19:24:42 crc kubenswrapper[4779]: I0929 19:24:42.072206 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-4816-account-create-td7m9" podStartSLOduration=2.072187529 podStartE2EDuration="2.072187529s" podCreationTimestamp="2025-09-29 19:24:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:24:42.067990445 +0000 UTC m=+992.952415585" watchObservedRunningTime="2025-09-29 19:24:42.072187529 +0000 UTC m=+992.956612629" Sep 29 19:24:43 crc kubenswrapper[4779]: I0929 19:24:43.061086 4779 generic.go:334] "Generic (PLEG): container finished" podID="ac185b21-9179-49ca-9034-24d9e28f3dd2" containerID="fc3bff118f4589e6778d2d9ec0a4c0ff2ecd71f1ac6b38bcbfc201f3f939cc45" exitCode=0 Sep 29 19:24:43 crc kubenswrapper[4779]: I0929 19:24:43.061131 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4816-account-create-td7m9" event={"ID":"ac185b21-9179-49ca-9034-24d9e28f3dd2","Type":"ContainerDied","Data":"fc3bff118f4589e6778d2d9ec0a4c0ff2ecd71f1ac6b38bcbfc201f3f939cc45"} Sep 29 19:24:43 crc kubenswrapper[4779]: I0929 19:24:43.785855 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:24:43 crc kubenswrapper[4779]: I0929 19:24:43.785953 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.071787 4779 generic.go:334] "Generic (PLEG): container finished" podID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerID="debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43" exitCode=0 Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.071886 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerDied","Data":"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43"} Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.077550 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wzdw7" event={"ID":"587857be-cc5b-43cb-bf66-d9e7aadcc587","Type":"ContainerDied","Data":"864512b1b1159c5fc6402ee29d34cb0bac194dc788f5ec10590304ee6edc892b"} Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.075369 4779 generic.go:334] "Generic (PLEG): container finished" podID="587857be-cc5b-43cb-bf66-d9e7aadcc587" containerID="864512b1b1159c5fc6402ee29d34cb0bac194dc788f5ec10590304ee6edc892b" exitCode=0 Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.493770 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.581201 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4m64\" (UniqueName: \"kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64\") pod \"ac185b21-9179-49ca-9034-24d9e28f3dd2\" (UID: \"ac185b21-9179-49ca-9034-24d9e28f3dd2\") " Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.587741 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64" (OuterVolumeSpecName: "kube-api-access-r4m64") pod "ac185b21-9179-49ca-9034-24d9e28f3dd2" (UID: "ac185b21-9179-49ca-9034-24d9e28f3dd2"). InnerVolumeSpecName "kube-api-access-r4m64". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.683762 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4m64\" (UniqueName: \"kubernetes.io/projected/ac185b21-9179-49ca-9034-24d9e28f3dd2-kube-api-access-r4m64\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.886438 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:44 crc kubenswrapper[4779]: I0929 19:24:44.895090 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a214376c-8f64-4f89-9354-14de32e2f17f-etc-swift\") pod \"swift-storage-0\" (UID: \"a214376c-8f64-4f89-9354-14de32e2f17f\") " pod="openstack/swift-storage-0" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.094663 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4816-account-create-td7m9" event={"ID":"ac185b21-9179-49ca-9034-24d9e28f3dd2","Type":"ContainerDied","Data":"49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76"} Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.094705 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49c21352149d6d3d80f6fab7ed7ea86d28114edb242aba83bef1b9e7d84bfb76" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.094673 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4816-account-create-td7m9" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.097615 4779 generic.go:334] "Generic (PLEG): container finished" podID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerID="9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c" exitCode=0 Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.097658 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerDied","Data":"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c"} Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.113663 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerStarted","Data":"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5"} Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.114800 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.155991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.157677 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=48.537454768 podStartE2EDuration="55.157661233s" podCreationTimestamp="2025-09-29 19:23:50 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.419561562 +0000 UTC m=+956.303986672" lastFinishedPulling="2025-09-29 19:24:12.039768007 +0000 UTC m=+962.924193137" observedRunningTime="2025-09-29 19:24:45.14942775 +0000 UTC m=+996.033852850" watchObservedRunningTime="2025-09-29 19:24:45.157661233 +0000 UTC m=+996.042086323" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.318456 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5bc9-account-create-dbrvg"] Sep 29 19:24:45 crc kubenswrapper[4779]: E0929 19:24:45.319051 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac185b21-9179-49ca-9034-24d9e28f3dd2" containerName="mariadb-account-create" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.319063 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac185b21-9179-49ca-9034-24d9e28f3dd2" containerName="mariadb-account-create" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.319203 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac185b21-9179-49ca-9034-24d9e28f3dd2" containerName="mariadb-account-create" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.322929 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.325342 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.334722 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bc9-account-create-dbrvg"] Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.396193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26bj8\" (UniqueName: \"kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8\") pod \"keystone-5bc9-account-create-dbrvg\" (UID: \"ba0df867-9aba-41a8-8359-4c93514a9115\") " pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.463983 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498663 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498690 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498767 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498805 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498826 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8xxc\" (UniqueName: \"kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.498848 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf\") pod \"587857be-cc5b-43cb-bf66-d9e7aadcc587\" (UID: \"587857be-cc5b-43cb-bf66-d9e7aadcc587\") " Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 
19:24:45.499085 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26bj8\" (UniqueName: \"kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8\") pod \"keystone-5bc9-account-create-dbrvg\" (UID: \"ba0df867-9aba-41a8-8359-4c93514a9115\") " pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.500357 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.500477 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.506644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc" (OuterVolumeSpecName: "kube-api-access-w8xxc") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "kube-api-access-w8xxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.514678 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.523077 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts" (OuterVolumeSpecName: "scripts") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.527278 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26bj8\" (UniqueName: \"kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8\") pod \"keystone-5bc9-account-create-dbrvg\" (UID: \"ba0df867-9aba-41a8-8359-4c93514a9115\") " pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.534035 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.536099 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "587857be-cc5b-43cb-bf66-d9e7aadcc587" (UID: "587857be-cc5b-43cb-bf66-d9e7aadcc587"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600014 4779 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600045 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8xxc\" (UniqueName: \"kubernetes.io/projected/587857be-cc5b-43cb-bf66-d9e7aadcc587-kube-api-access-w8xxc\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600056 4779 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600064 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/587857be-cc5b-43cb-bf66-d9e7aadcc587-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600073 4779 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/587857be-cc5b-43cb-bf66-d9e7aadcc587-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600083 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.600091 4779 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/587857be-cc5b-43cb-bf66-d9e7aadcc587-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.619811 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8b67-account-create-lzcfr"] Sep 29 19:24:45 crc kubenswrapper[4779]: E0929 19:24:45.620095 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="587857be-cc5b-43cb-bf66-d9e7aadcc587" containerName="swift-ring-rebalance" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.620109 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="587857be-cc5b-43cb-bf66-d9e7aadcc587" containerName="swift-ring-rebalance" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.620284 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="587857be-cc5b-43cb-bf66-d9e7aadcc587" containerName="swift-ring-rebalance" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.620749 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.629263 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.633891 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8b67-account-create-lzcfr"] Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.647118 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.701548 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqm6m\" (UniqueName: \"kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m\") pod \"placement-8b67-account-create-lzcfr\" (UID: \"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c\") " pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.790462 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.805835 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqm6m\" (UniqueName: \"kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m\") pod \"placement-8b67-account-create-lzcfr\" (UID: \"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c\") " pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.825123 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqm6m\" (UniqueName: \"kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m\") pod \"placement-8b67-account-create-lzcfr\" (UID: \"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c\") " pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:45 crc kubenswrapper[4779]: I0929 19:24:45.947035 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.101007 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5bc9-account-create-dbrvg"] Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.130428 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bc9-account-create-dbrvg" event={"ID":"ba0df867-9aba-41a8-8359-4c93514a9115","Type":"ContainerStarted","Data":"defaad1f01669ae1538caad4a2d29551bd9ac5add5ff9681a033c6aec5bfdefa"} Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.133590 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-kqr8g"] Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.134657 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.137099 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.137471 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mttwt" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.137811 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerStarted","Data":"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204"} Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.138403 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.140276 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"d9ddd41978a055f22000170dac9bef681a9b5a3468c68169e3a5102226573591"} Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.145310 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kqr8g"] Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.160168 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wzdw7" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.160603 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wzdw7" event={"ID":"587857be-cc5b-43cb-bf66-d9e7aadcc587","Type":"ContainerDied","Data":"570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555"} Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.160626 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="570c87f99f9d79c266c4730ab8d4ff3d23dcd7b9be070ce5e6a871518afd0555" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.193135 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=48.892154806 podStartE2EDuration="56.193117487s" podCreationTimestamp="2025-09-29 19:23:50 +0000 UTC" firstStartedPulling="2025-09-29 19:24:05.473574254 +0000 UTC m=+956.357999364" lastFinishedPulling="2025-09-29 19:24:12.774536945 +0000 UTC m=+963.658962045" observedRunningTime="2025-09-29 19:24:46.187255658 +0000 UTC m=+997.071680748" watchObservedRunningTime="2025-09-29 19:24:46.193117487 +0000 UTC m=+997.077542587" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.212112 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.212160 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.212185 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nhlf\" (UniqueName: \"kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.212222 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.269916 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8b67-account-create-lzcfr"] Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.314177 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.314228 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.314255 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nhlf\" (UniqueName: \"kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.314294 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.319584 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.323887 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.324153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.333993 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nhlf\" (UniqueName: \"kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf\") pod \"glance-db-sync-kqr8g\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:46 crc kubenswrapper[4779]: I0929 19:24:46.555162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kqr8g" Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.169489 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"0eec7a68f9903184bee8ddef93f5c707e3dc9b2d247da77e5c6dba388e83062c"} Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.171771 4779 generic.go:334] "Generic (PLEG): container finished" podID="ba0df867-9aba-41a8-8359-4c93514a9115" containerID="9e83a75fa8662bc7c160d36dbe67201c1fbd53e867327259fb84c3bbeaab3d39" exitCode=0 Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.171819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bc9-account-create-dbrvg" event={"ID":"ba0df867-9aba-41a8-8359-4c93514a9115","Type":"ContainerDied","Data":"9e83a75fa8662bc7c160d36dbe67201c1fbd53e867327259fb84c3bbeaab3d39"} Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.173182 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" containerID="71923f0bc15f888e5cd4744b897d266373e28fa132a149eb0d06dd90f977b607" exitCode=0 Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.173404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b67-account-create-lzcfr" event={"ID":"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c","Type":"ContainerDied","Data":"71923f0bc15f888e5cd4744b897d266373e28fa132a149eb0d06dd90f977b607"} Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.173457 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b67-account-create-lzcfr" event={"ID":"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c","Type":"ContainerStarted","Data":"ab9ac5acc10772e235e6a50078a45714efbe113c3c3b1adfa45e9c687045a5b4"} Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.331786 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-kqr8g"] Sep 29 19:24:47 crc kubenswrapper[4779]: W0929 19:24:47.354334 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eea816e_5b9e_4646_bd6a_2421436d9c90.slice/crio-8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16 WatchSource:0}: Error finding container 8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16: Status 404 returned error can't find the container with id 8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16 Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.660987 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-rwqzd" podUID="9771c712-00ce-4dcf-ab04-7b6893c8725c" containerName="ovn-controller" probeResult="failure" output=< Sep 29 19:24:47 crc kubenswrapper[4779]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 19:24:47 crc kubenswrapper[4779]: > Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.661014 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.675142 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-lx5dn" Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.888010 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-rwqzd-config-cwj2b"] Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.889353 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.891733 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 29 19:24:47 crc kubenswrapper[4779]: I0929 19:24:47.912230 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rwqzd-config-cwj2b"] Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.044737 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbwlh\" (UniqueName: \"kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.044800 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.044843 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.044916 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.045227 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.045254 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146769 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-lbwlh\" (UniqueName: \"kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146834 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146855 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146900 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.146976 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.147288 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.147358 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.147520 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.147999 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.148889 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.165349 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbwlh\" (UniqueName: \"kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh\") pod \"ovn-controller-rwqzd-config-cwj2b\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.182768 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"0a72db66707ea0c04d0733dc2600a03fd56eb46c47783a6933ff07142e4e1a09"} Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.182802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"9c266e3537bfb66d66cfba3a1f2368151b84d1f55681aef378287666f2e801ad"} Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.182814 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"632c220294ffac14024ebada328beaa41289ff59dffa053bdc5257f96eaa74e7"} Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.184801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kqr8g" event={"ID":"2eea816e-5b9e-4646-bd6a-2421436d9c90","Type":"ContainerStarted","Data":"8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16"} Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.210872 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.598701 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.606120 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.728336 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rwqzd-config-cwj2b"] Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.760933 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26bj8\" (UniqueName: \"kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8\") pod \"ba0df867-9aba-41a8-8359-4c93514a9115\" (UID: \"ba0df867-9aba-41a8-8359-4c93514a9115\") " Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.761020 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqm6m\" (UniqueName: \"kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m\") pod \"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c\" (UID: \"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c\") " Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.767292 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m" (OuterVolumeSpecName: "kube-api-access-nqm6m") pod "e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" (UID: "e1b73437-bcee-4bd0-82ff-3a88a0eebb4c"). InnerVolumeSpecName "kube-api-access-nqm6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.768845 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8" (OuterVolumeSpecName: "kube-api-access-26bj8") pod "ba0df867-9aba-41a8-8359-4c93514a9115" (UID: "ba0df867-9aba-41a8-8359-4c93514a9115"). InnerVolumeSpecName "kube-api-access-26bj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.862976 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqm6m\" (UniqueName: \"kubernetes.io/projected/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c-kube-api-access-nqm6m\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:48 crc kubenswrapper[4779]: I0929 19:24:48.863005 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26bj8\" (UniqueName: \"kubernetes.io/projected/ba0df867-9aba-41a8-8359-4c93514a9115-kube-api-access-26bj8\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.196228 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5bc9-account-create-dbrvg" event={"ID":"ba0df867-9aba-41a8-8359-4c93514a9115","Type":"ContainerDied","Data":"defaad1f01669ae1538caad4a2d29551bd9ac5add5ff9681a033c6aec5bfdefa"} Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.196575 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="defaad1f01669ae1538caad4a2d29551bd9ac5add5ff9681a033c6aec5bfdefa" Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.196643 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5bc9-account-create-dbrvg" Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.201200 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b67-account-create-lzcfr" event={"ID":"e1b73437-bcee-4bd0-82ff-3a88a0eebb4c","Type":"ContainerDied","Data":"ab9ac5acc10772e235e6a50078a45714efbe113c3c3b1adfa45e9c687045a5b4"} Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.201254 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab9ac5acc10772e235e6a50078a45714efbe113c3c3b1adfa45e9c687045a5b4" Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.201405 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b67-account-create-lzcfr" Sep 29 19:24:49 crc kubenswrapper[4779]: I0929 19:24:49.204510 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rwqzd-config-cwj2b" event={"ID":"05cf3a2a-ba44-499b-8d99-340478292197","Type":"ContainerStarted","Data":"2b34b6ea2d1737b8e500a5a9013b5f579c86e770b03c3467094526764ded29d0"} Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.218214 4779 generic.go:334] "Generic (PLEG): container finished" podID="05cf3a2a-ba44-499b-8d99-340478292197" containerID="5c0a20d704274465016a7eaa27cafa17f905c0659e4eb33186c0c8abb2ec88df" exitCode=0 Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.218307 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rwqzd-config-cwj2b" event={"ID":"05cf3a2a-ba44-499b-8d99-340478292197","Type":"ContainerDied","Data":"5c0a20d704274465016a7eaa27cafa17f905c0659e4eb33186c0c8abb2ec88df"} Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.230340 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"854ee192e307fd89392d62d1c0552c9ade36c2cb7b43a154bcb144861adc0734"} Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.230380 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"a3f51bcee0733eb0fe4b904362ed62bc3f53be22476423d6371a57582b872630"} Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.230393 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"390f71e9be4c4d66ba31e21184382b8c8b653cdf0605dd53cede1558f78078fb"} Sep 29 19:24:50 crc kubenswrapper[4779]: I0929 19:24:50.230404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"2a52080c567d7ca1feee3a18bd48ffe9bdf2cc5d689aa0429264734e0718a331"} Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.242873 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"e09c0c76bc07c4ae3760b7d3d3d01e08f788c939501f9f44d862e788165e13f1"} Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.623558 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811463 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811540 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811562 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbwlh\" (UniqueName: \"kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811646 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.811738 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn\") pod \"05cf3a2a-ba44-499b-8d99-340478292197\" (UID: \"05cf3a2a-ba44-499b-8d99-340478292197\") " Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.812051 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.812417 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.812477 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run" (OuterVolumeSpecName: "var-run") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.812723 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.813140 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts" (OuterVolumeSpecName: "scripts") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.818753 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh" (OuterVolumeSpecName: "kube-api-access-lbwlh") pod "05cf3a2a-ba44-499b-8d99-340478292197" (UID: "05cf3a2a-ba44-499b-8d99-340478292197"). InnerVolumeSpecName "kube-api-access-lbwlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.917718 4779 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.918004 4779 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.918023 4779 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.918039 4779 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05cf3a2a-ba44-499b-8d99-340478292197-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.918059 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbwlh\" (UniqueName: \"kubernetes.io/projected/05cf3a2a-ba44-499b-8d99-340478292197-kube-api-access-lbwlh\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:51 crc kubenswrapper[4779]: I0929 19:24:51.918072 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05cf3a2a-ba44-499b-8d99-340478292197-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.257745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rwqzd-config-cwj2b" event={"ID":"05cf3a2a-ba44-499b-8d99-340478292197","Type":"ContainerDied","Data":"2b34b6ea2d1737b8e500a5a9013b5f579c86e770b03c3467094526764ded29d0"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.257809 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b34b6ea2d1737b8e500a5a9013b5f579c86e770b03c3467094526764ded29d0" Sep 29 19:24:52 crc 
kubenswrapper[4779]: I0929 19:24:52.257769 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rwqzd-config-cwj2b" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265736 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"e18b3025f69a4559b1fba6f37b2ea486bea0fb6d5a0204431156df8dd25279aa"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265781 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"1a6fe6df294bce95a184a1cbf5170e5df1df867d4dcf6e5f53fdc7f13d014967"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265791 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"7c003f633791befc926ef36f5808ead69e876ab4d995f39f8229837edefcab9f"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265800 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"dd0969286ad798ca7dde1cdc758f9d64d90ee6645b7caaa2c5fabbe137cc390d"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265811 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"3dba00b8b71412fcbf017c54c6f336aa5ecacb9932b3d380979f1bd6b6e856bf"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.265819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a214376c-8f64-4f89-9354-14de32e2f17f","Type":"ContainerStarted","Data":"7341ee0c27fc42786da1fe2daa2ed713e00c70cc3387733aa993fdcfac2a510a"} Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.312584 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.20056254 podStartE2EDuration="25.312567488s" podCreationTimestamp="2025-09-29 19:24:27 +0000 UTC" firstStartedPulling="2025-09-29 19:24:45.796011279 +0000 UTC m=+996.680436379" lastFinishedPulling="2025-09-29 19:24:50.908016227 +0000 UTC m=+1001.792441327" observedRunningTime="2025-09-29 19:24:52.311453288 +0000 UTC m=+1003.195878388" watchObservedRunningTime="2025-09-29 19:24:52.312567488 +0000 UTC m=+1003.196992578" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.562855 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:24:52 crc kubenswrapper[4779]: E0929 19:24:52.563168 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563181 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: E0929 19:24:52.563201 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05cf3a2a-ba44-499b-8d99-340478292197" containerName="ovn-config" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563207 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="05cf3a2a-ba44-499b-8d99-340478292197" 
containerName="ovn-config" Sep 29 19:24:52 crc kubenswrapper[4779]: E0929 19:24:52.563219 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba0df867-9aba-41a8-8359-4c93514a9115" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563226 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba0df867-9aba-41a8-8359-4c93514a9115" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563395 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="05cf3a2a-ba44-499b-8d99-340478292197" containerName="ovn-config" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563426 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba0df867-9aba-41a8-8359-4c93514a9115" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.563440 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" containerName="mariadb-account-create" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.564409 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.571535 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.579240 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.678536 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-rwqzd" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.714563 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-rwqzd-config-cwj2b"] Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.719671 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-rwqzd-config-cwj2b"] Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.737947 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.737996 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.738112 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.738260 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtt98\" (UniqueName: 
\"kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.738285 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.738394 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.840146 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.840279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.840303 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.840373 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.841254 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.841302 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.841628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.841618 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.842068 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.842113 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtt98\" (UniqueName: \"kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.842771 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.876200 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtt98\" (UniqueName: \"kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98\") pod \"dnsmasq-dns-77585f5f8c-zkpbk\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:52 crc kubenswrapper[4779]: I0929 19:24:52.888303 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:24:53 crc kubenswrapper[4779]: I0929 19:24:53.377975 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:24:53 crc kubenswrapper[4779]: I0929 19:24:53.785025 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05cf3a2a-ba44-499b-8d99-340478292197" path="/var/lib/kubelet/pods/05cf3a2a-ba44-499b-8d99-340478292197/volumes" Sep 29 19:24:54 crc kubenswrapper[4779]: I0929 19:24:54.284752 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" event={"ID":"09860743-67e8-4aa3-9814-90b13f91317e","Type":"ContainerStarted","Data":"5b04d55fae55c80bef39a75a8e83ea45a9b90659b64abee32ec86f04bea1ceef"} Sep 29 19:25:00 crc kubenswrapper[4779]: I0929 19:25:00.350038 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kqr8g" event={"ID":"2eea816e-5b9e-4646-bd6a-2421436d9c90","Type":"ContainerStarted","Data":"d247d11fd42cdb449ad4242ad724d481e72a91c40d19d74e638d5bae1b2ec898"} Sep 29 19:25:00 crc kubenswrapper[4779]: I0929 19:25:00.352636 4779 generic.go:334] "Generic (PLEG): container finished" podID="09860743-67e8-4aa3-9814-90b13f91317e" containerID="4251d7f94f1c1eae6eaa621a7bbdd14cf2221341173d7f4c187d74f6aab3cf09" exitCode=0 Sep 29 19:25:00 crc kubenswrapper[4779]: I0929 19:25:00.352676 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" event={"ID":"09860743-67e8-4aa3-9814-90b13f91317e","Type":"ContainerDied","Data":"4251d7f94f1c1eae6eaa621a7bbdd14cf2221341173d7f4c187d74f6aab3cf09"} Sep 29 19:25:00 crc kubenswrapper[4779]: I0929 19:25:00.370640 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-kqr8g" podStartSLOduration=1.937592715 podStartE2EDuration="14.370624405s" podCreationTimestamp="2025-09-29 19:24:46 +0000 UTC" firstStartedPulling="2025-09-29 19:24:47.356715079 +0000 UTC m=+998.241140179" lastFinishedPulling="2025-09-29 19:24:59.789746759 +0000 UTC m=+1010.674171869" observedRunningTime="2025-09-29 19:25:00.365423804 +0000 UTC m=+1011.249848934" watchObservedRunningTime="2025-09-29 19:25:00.370624405 +0000 UTC m=+1011.255049505" Sep 29 19:25:01 crc kubenswrapper[4779]: I0929 19:25:01.376635 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" event={"ID":"09860743-67e8-4aa3-9814-90b13f91317e","Type":"ContainerStarted","Data":"501b263905e99102640de533c53dee9756db975bff779ac2b3171660964d3b35"} Sep 29 19:25:01 crc kubenswrapper[4779]: I0929 19:25:01.377313 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:25:01 crc kubenswrapper[4779]: I0929 19:25:01.420206 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" podStartSLOduration=9.420182982 podStartE2EDuration="9.420182982s" podCreationTimestamp="2025-09-29 19:24:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:01.415918196 +0000 UTC m=+1012.300343326" watchObservedRunningTime="2025-09-29 19:25:01.420182982 +0000 UTC m=+1012.304608112" Sep 29 19:25:01 crc kubenswrapper[4779]: I0929 19:25:01.983625 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 19:25:02 crc 
kubenswrapper[4779]: I0929 19:25:02.277501 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.393090 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-k7hwz"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.394983 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.400248 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-k7hwz"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.487372 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7ttv2"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.488437 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.498679 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7ttv2"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.515145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmfdb\" (UniqueName: \"kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb\") pod \"cinder-db-create-k7hwz\" (UID: \"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f\") " pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.586657 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-zrrxv"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.587864 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.596040 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-zrrxv"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.616271 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmfdb\" (UniqueName: \"kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb\") pod \"cinder-db-create-k7hwz\" (UID: \"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f\") " pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.616356 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wpz8\" (UniqueName: \"kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8\") pod \"barbican-db-create-7ttv2\" (UID: \"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90\") " pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.649484 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmfdb\" (UniqueName: \"kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb\") pod \"cinder-db-create-k7hwz\" (UID: \"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f\") " pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.680852 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4ww89"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.682338 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.684903 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.685992 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.686236 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zpl" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.686874 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.703668 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ww89"] Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.717960 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.718648 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6klh\" (UniqueName: \"kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh\") pod \"neutron-db-create-zrrxv\" (UID: \"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1\") " pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.718725 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wpz8\" (UniqueName: \"kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8\") pod \"barbican-db-create-7ttv2\" (UID: \"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90\") " pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.770897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wpz8\" (UniqueName: \"kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8\") pod \"barbican-db-create-7ttv2\" (UID: \"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90\") " pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.805105 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.820070 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.820135 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcj6q\" (UniqueName: \"kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.820183 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6klh\" (UniqueName: \"kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh\") pod \"neutron-db-create-zrrxv\" (UID: \"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1\") " pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.820268 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.862972 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6klh\" (UniqueName: \"kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh\") pod \"neutron-db-create-zrrxv\" (UID: \"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1\") " pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.904153 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.921857 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcj6q\" (UniqueName: \"kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.922161 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.922249 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.930037 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.930593 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:02 crc kubenswrapper[4779]: I0929 19:25:02.950206 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcj6q\" (UniqueName: \"kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q\") pod \"keystone-db-sync-4ww89\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.018834 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.326424 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-k7hwz"] Sep 29 19:25:03 crc kubenswrapper[4779]: W0929 19:25:03.344703 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73a66bfb_fbcf_4e22_93aa_e8d91aa2892f.slice/crio-f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f WatchSource:0}: Error finding container f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f: Status 404 returned error can't find the container with id f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.345107 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7ttv2"] Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.395703 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7ttv2" event={"ID":"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90","Type":"ContainerStarted","Data":"00288761496e9fbabfdade118c21c94b663ce1f03f8498842bcf062ad75033b6"} Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.397096 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k7hwz" event={"ID":"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f","Type":"ContainerStarted","Data":"f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f"} Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.458367 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-zrrxv"] Sep 29 19:25:03 crc kubenswrapper[4779]: W0929 19:25:03.462273 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8267d69e_ea3c_4782_93d0_0bcf9a95bdf1.slice/crio-22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c WatchSource:0}: Error finding container 22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c: Status 404 returned error can't find the container with id 22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c Sep 29 19:25:03 crc kubenswrapper[4779]: I0929 19:25:03.533153 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ww89"] Sep 29 19:25:03 crc kubenswrapper[4779]: W0929 19:25:03.543920 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8978916_c429_4c70_8f74_93e9a49a8ae7.slice/crio-28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f WatchSource:0}: Error finding container 28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f: Status 404 returned error can't find the container with id 28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.408528 4779 generic.go:334] "Generic (PLEG): container finished" podID="1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" containerID="5266be70869be4aafd3e27e00724b9889d8f93061c53c946cb9eba201f86e811" exitCode=0 Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.408616 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7ttv2" event={"ID":"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90","Type":"ContainerDied","Data":"5266be70869be4aafd3e27e00724b9889d8f93061c53c946cb9eba201f86e811"} Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 
19:25:04.410256 4779 generic.go:334] "Generic (PLEG): container finished" podID="73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" containerID="8b630a1b098eec73f5c8b0f2124b002327083b57620a94344709aff52d0af6bf" exitCode=0 Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.410330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k7hwz" event={"ID":"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f","Type":"ContainerDied","Data":"8b630a1b098eec73f5c8b0f2124b002327083b57620a94344709aff52d0af6bf"} Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.411172 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ww89" event={"ID":"e8978916-c429-4c70-8f74-93e9a49a8ae7","Type":"ContainerStarted","Data":"28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f"} Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.412146 4779 generic.go:334] "Generic (PLEG): container finished" podID="8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" containerID="47e4fe6eb9a5a9707e44407bbcccba3eab68529066ae254fbd72f01e3ccb9b40" exitCode=0 Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.412173 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrrxv" event={"ID":"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1","Type":"ContainerDied","Data":"47e4fe6eb9a5a9707e44407bbcccba3eab68529066ae254fbd72f01e3ccb9b40"} Sep 29 19:25:04 crc kubenswrapper[4779]: I0929 19:25:04.412189 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrrxv" event={"ID":"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1","Type":"ContainerStarted","Data":"22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c"} Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.773899 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.814395 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6klh\" (UniqueName: \"kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh\") pod \"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1\" (UID: \"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1\") " Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.821699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh" (OuterVolumeSpecName: "kube-api-access-f6klh") pod "8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" (UID: "8267d69e-ea3c-4782-93d0-0bcf9a95bdf1"). InnerVolumeSpecName "kube-api-access-f6klh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.825255 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.887111 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.892156 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.915989 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wpz8\" (UniqueName: \"kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8\") pod \"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90\" (UID: \"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90\") " Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.916427 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6klh\" (UniqueName: \"kubernetes.io/projected/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1-kube-api-access-f6klh\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.919914 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8" (OuterVolumeSpecName: "kube-api-access-8wpz8") pod "1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" (UID: "1eb16ed1-d3da-47d3-bb78-26e3b67fbc90"). InnerVolumeSpecName "kube-api-access-8wpz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.963791 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:25:07 crc kubenswrapper[4779]: I0929 19:25:07.964045 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-lww4s" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="dnsmasq-dns" containerID="cri-o://1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77" gracePeriod=10 Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.017590 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmfdb\" (UniqueName: \"kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb\") pod \"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f\" (UID: \"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.018011 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wpz8\" (UniqueName: \"kubernetes.io/projected/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90-kube-api-access-8wpz8\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.022740 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb" (OuterVolumeSpecName: "kube-api-access-qmfdb") pod "73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" (UID: "73a66bfb-fbcf-4e22-93aa-e8d91aa2892f"). InnerVolumeSpecName "kube-api-access-qmfdb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.027679 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-lww4s" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: connect: connection refused" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.120019 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmfdb\" (UniqueName: \"kubernetes.io/projected/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f-kube-api-access-qmfdb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.309187 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.424041 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config\") pod \"7b7daa08-8885-418b-bff6-582ebb727b73\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.424120 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb\") pod \"7b7daa08-8885-418b-bff6-582ebb727b73\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.424157 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvshv\" (UniqueName: \"kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv\") pod \"7b7daa08-8885-418b-bff6-582ebb727b73\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.424234 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc\") pod \"7b7daa08-8885-418b-bff6-582ebb727b73\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.424256 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb\") pod \"7b7daa08-8885-418b-bff6-582ebb727b73\" (UID: \"7b7daa08-8885-418b-bff6-582ebb727b73\") " Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.430773 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv" (OuterVolumeSpecName: "kube-api-access-kvshv") pod "7b7daa08-8885-418b-bff6-582ebb727b73" (UID: "7b7daa08-8885-418b-bff6-582ebb727b73"). InnerVolumeSpecName "kube-api-access-kvshv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.446619 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k7hwz" event={"ID":"73a66bfb-fbcf-4e22-93aa-e8d91aa2892f","Type":"ContainerDied","Data":"f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.446650 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-k7hwz" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.446657 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6f0e0da8896cebdb65faab10a7b21f0f7a0891bee42b29e262ead58c4579f4f" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.449670 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ww89" event={"ID":"e8978916-c429-4c70-8f74-93e9a49a8ae7","Type":"ContainerStarted","Data":"f5576d4845e19e471e67a51c575cf3f68e5ab6e7ae2249b9d9ebf065fd358162"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.451551 4779 generic.go:334] "Generic (PLEG): container finished" podID="2eea816e-5b9e-4646-bd6a-2421436d9c90" containerID="d247d11fd42cdb449ad4242ad724d481e72a91c40d19d74e638d5bae1b2ec898" exitCode=0 Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.451634 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kqr8g" event={"ID":"2eea816e-5b9e-4646-bd6a-2421436d9c90","Type":"ContainerDied","Data":"d247d11fd42cdb449ad4242ad724d481e72a91c40d19d74e638d5bae1b2ec898"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.458437 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zrrxv" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.458465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrrxv" event={"ID":"8267d69e-ea3c-4782-93d0-0bcf9a95bdf1","Type":"ContainerDied","Data":"22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.458493 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22b9e6d428afd37bde27da4d62a550c72fee5165c9c621ece6398bdd1ddeb96c" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.461421 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7ttv2" event={"ID":"1eb16ed1-d3da-47d3-bb78-26e3b67fbc90","Type":"ContainerDied","Data":"00288761496e9fbabfdade118c21c94b663ce1f03f8498842bcf062ad75033b6"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.461463 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00288761496e9fbabfdade118c21c94b663ce1f03f8498842bcf062ad75033b6" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.461530 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7ttv2" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.477145 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b7daa08-8885-418b-bff6-582ebb727b73" (UID: "7b7daa08-8885-418b-bff6-582ebb727b73"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.479778 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b7daa08-8885-418b-bff6-582ebb727b73" (UID: "7b7daa08-8885-418b-bff6-582ebb727b73"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.482028 4779 generic.go:334] "Generic (PLEG): container finished" podID="7b7daa08-8885-418b-bff6-582ebb727b73" containerID="1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77" exitCode=0 Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.482648 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lww4s" event={"ID":"7b7daa08-8885-418b-bff6-582ebb727b73","Type":"ContainerDied","Data":"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.482689 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lww4s" event={"ID":"7b7daa08-8885-418b-bff6-582ebb727b73","Type":"ContainerDied","Data":"79cb62dd210f71b2ed7fa939eb7fc7cfabbb48110ccd17223c1f01aea5105fc5"} Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.482711 4779 scope.go:117] "RemoveContainer" containerID="1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.482726 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lww4s" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.502229 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config" (OuterVolumeSpecName: "config") pod "7b7daa08-8885-418b-bff6-582ebb727b73" (UID: "7b7daa08-8885-418b-bff6-582ebb727b73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.508366 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4ww89" podStartSLOduration=2.3725373530000002 podStartE2EDuration="6.508350065s" podCreationTimestamp="2025-09-29 19:25:02 +0000 UTC" firstStartedPulling="2025-09-29 19:25:03.546487713 +0000 UTC m=+1014.430912813" lastFinishedPulling="2025-09-29 19:25:07.682300385 +0000 UTC m=+1018.566725525" observedRunningTime="2025-09-29 19:25:08.478120315 +0000 UTC m=+1019.362545425" watchObservedRunningTime="2025-09-29 19:25:08.508350065 +0000 UTC m=+1019.392775165" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.512541 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b7daa08-8885-418b-bff6-582ebb727b73" (UID: "7b7daa08-8885-418b-bff6-582ebb727b73"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.520063 4779 scope.go:117] "RemoveContainer" containerID="07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.526315 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.526369 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.526386 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvshv\" (UniqueName: \"kubernetes.io/projected/7b7daa08-8885-418b-bff6-582ebb727b73-kube-api-access-kvshv\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.526397 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.526409 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b7daa08-8885-418b-bff6-582ebb727b73-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.536214 4779 scope.go:117] "RemoveContainer" containerID="1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77" Sep 29 19:25:08 crc kubenswrapper[4779]: E0929 19:25:08.536712 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77\": container with ID starting with 1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77 not found: ID does not exist" containerID="1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.536774 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77"} err="failed to get container status \"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77\": rpc error: code = NotFound desc = could not find container \"1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77\": container with ID starting with 1377035065f0110ebd717796769b740e2864a55c93408ade32341b22f1fb7a77 not found: ID does not exist" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.536793 4779 scope.go:117] "RemoveContainer" containerID="07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7" Sep 29 19:25:08 crc kubenswrapper[4779]: E0929 19:25:08.537205 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7\": container with ID starting with 07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7 not found: ID does not exist" containerID="07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.537230 4779 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7"} err="failed to get container status \"07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7\": rpc error: code = NotFound desc = could not find container \"07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7\": container with ID starting with 07d31420cf1c45d6f2bfcad4a0187d984d83498f9cef86c30640a0f64c1370e7 not found: ID does not exist" Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.830542 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:25:08 crc kubenswrapper[4779]: I0929 19:25:08.838008 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lww4s"] Sep 29 19:25:09 crc kubenswrapper[4779]: I0929 19:25:09.797001 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" path="/var/lib/kubelet/pods/7b7daa08-8885-418b-bff6-582ebb727b73/volumes" Sep 29 19:25:09 crc kubenswrapper[4779]: I0929 19:25:09.924558 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-kqr8g" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.061704 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data\") pod \"2eea816e-5b9e-4646-bd6a-2421436d9c90\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.061803 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data\") pod \"2eea816e-5b9e-4646-bd6a-2421436d9c90\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.062052 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nhlf\" (UniqueName: \"kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf\") pod \"2eea816e-5b9e-4646-bd6a-2421436d9c90\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.062095 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle\") pod \"2eea816e-5b9e-4646-bd6a-2421436d9c90\" (UID: \"2eea816e-5b9e-4646-bd6a-2421436d9c90\") " Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.076492 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2eea816e-5b9e-4646-bd6a-2421436d9c90" (UID: "2eea816e-5b9e-4646-bd6a-2421436d9c90"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.076617 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf" (OuterVolumeSpecName: "kube-api-access-2nhlf") pod "2eea816e-5b9e-4646-bd6a-2421436d9c90" (UID: "2eea816e-5b9e-4646-bd6a-2421436d9c90"). InnerVolumeSpecName "kube-api-access-2nhlf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.102844 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2eea816e-5b9e-4646-bd6a-2421436d9c90" (UID: "2eea816e-5b9e-4646-bd6a-2421436d9c90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.141934 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data" (OuterVolumeSpecName: "config-data") pod "2eea816e-5b9e-4646-bd6a-2421436d9c90" (UID: "2eea816e-5b9e-4646-bd6a-2421436d9c90"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.166293 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nhlf\" (UniqueName: \"kubernetes.io/projected/2eea816e-5b9e-4646-bd6a-2421436d9c90-kube-api-access-2nhlf\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.166348 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.166363 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.166375 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eea816e-5b9e-4646-bd6a-2421436d9c90-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.502441 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-kqr8g" event={"ID":"2eea816e-5b9e-4646-bd6a-2421436d9c90","Type":"ContainerDied","Data":"8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16"} Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.502480 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c00e755279c0952ee70d67896278cffb2a2418b3a4c4701c61f8fa8a018fe16" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.502543 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-kqr8g" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.916515 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"] Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917073 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917085 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917099 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eea816e-5b9e-4646-bd6a-2421436d9c90" containerName="glance-db-sync" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917105 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eea816e-5b9e-4646-bd6a-2421436d9c90" containerName="glance-db-sync" Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917113 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="init" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917120 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="init" Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917139 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917145 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917156 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="dnsmasq-dns" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917161 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="dnsmasq-dns" Sep 29 19:25:10 crc kubenswrapper[4779]: E0929 19:25:10.917181 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917187 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917339 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7daa08-8885-418b-bff6-582ebb727b73" containerName="dnsmasq-dns" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917352 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917366 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917373 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eea816e-5b9e-4646-bd6a-2421436d9c90" containerName="glance-db-sync" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.917384 4779 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" containerName="mariadb-database-create" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.920832 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.942960 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"] Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978264 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvfzq\" (UniqueName: \"kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978360 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:10 crc kubenswrapper[4779]: I0929 19:25:10.978420 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080082 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080162 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvfzq\" (UniqueName: \"kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq\") pod 
\"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080271 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.080313 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.081137 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.081204 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.081607 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.081826 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.081871 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " 
pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.111649 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvfzq\" (UniqueName: \"kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq\") pod \"dnsmasq-dns-7ff5475cc9-h22ps\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.247995 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.515079 4779 generic.go:334] "Generic (PLEG): container finished" podID="e8978916-c429-4c70-8f74-93e9a49a8ae7" containerID="f5576d4845e19e471e67a51c575cf3f68e5ab6e7ae2249b9d9ebf065fd358162" exitCode=0 Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.515135 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ww89" event={"ID":"e8978916-c429-4c70-8f74-93e9a49a8ae7","Type":"ContainerDied","Data":"f5576d4845e19e471e67a51c575cf3f68e5ab6e7ae2249b9d9ebf065fd358162"} Sep 29 19:25:11 crc kubenswrapper[4779]: W0929 19:25:11.683674 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe3112c6_e01a_4173_b7d2_4a80b7779650.slice/crio-cab61bba6794fc13b96fdc1409ba6add0663ca6c37ffa5ccd9bf752b2a880398 WatchSource:0}: Error finding container cab61bba6794fc13b96fdc1409ba6add0663ca6c37ffa5ccd9bf752b2a880398: Status 404 returned error can't find the container with id cab61bba6794fc13b96fdc1409ba6add0663ca6c37ffa5ccd9bf752b2a880398 Sep 29 19:25:11 crc kubenswrapper[4779]: I0929 19:25:11.691057 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"] Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.525968 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerID="2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84" exitCode=0 Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.526082 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" event={"ID":"fe3112c6-e01a-4173-b7d2-4a80b7779650","Type":"ContainerDied","Data":"2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84"} Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.526509 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" event={"ID":"fe3112c6-e01a-4173-b7d2-4a80b7779650","Type":"ContainerStarted","Data":"cab61bba6794fc13b96fdc1409ba6add0663ca6c37ffa5ccd9bf752b2a880398"} Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.836499 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.907660 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle\") pod \"e8978916-c429-4c70-8f74-93e9a49a8ae7\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.907832 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data\") pod \"e8978916-c429-4c70-8f74-93e9a49a8ae7\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.907856 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcj6q\" (UniqueName: \"kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q\") pod \"e8978916-c429-4c70-8f74-93e9a49a8ae7\" (UID: \"e8978916-c429-4c70-8f74-93e9a49a8ae7\") " Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.912397 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q" (OuterVolumeSpecName: "kube-api-access-dcj6q") pod "e8978916-c429-4c70-8f74-93e9a49a8ae7" (UID: "e8978916-c429-4c70-8f74-93e9a49a8ae7"). InnerVolumeSpecName "kube-api-access-dcj6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.936587 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8978916-c429-4c70-8f74-93e9a49a8ae7" (UID: "e8978916-c429-4c70-8f74-93e9a49a8ae7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:12 crc kubenswrapper[4779]: I0929 19:25:12.960247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data" (OuterVolumeSpecName: "config-data") pod "e8978916-c429-4c70-8f74-93e9a49a8ae7" (UID: "e8978916-c429-4c70-8f74-93e9a49a8ae7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.009993 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.010304 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcj6q\" (UniqueName: \"kubernetes.io/projected/e8978916-c429-4c70-8f74-93e9a49a8ae7-kube-api-access-dcj6q\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.010458 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8978916-c429-4c70-8f74-93e9a49a8ae7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.535951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ww89" event={"ID":"e8978916-c429-4c70-8f74-93e9a49a8ae7","Type":"ContainerDied","Data":"28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f"} Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.537017 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28b471dfae2933d92a78b84f17636fb4e341675fa5daa82eefbb6012f142284f" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.535985 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ww89" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.538506 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" event={"ID":"fe3112c6-e01a-4173-b7d2-4a80b7779650","Type":"ContainerStarted","Data":"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"} Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.538632 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.569134 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" podStartSLOduration=3.5691181910000003 podStartE2EDuration="3.569118191s" podCreationTimestamp="2025-09-29 19:25:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:13.564194207 +0000 UTC m=+1024.448619357" watchObservedRunningTime="2025-09-29 19:25:13.569118191 +0000 UTC m=+1024.453543291" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.785373 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.785430 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.816899 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"] 
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.849444 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cq5wz"]
Sep 29 19:25:13 crc kubenswrapper[4779]: E0929 19:25:13.850062 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8978916-c429-4c70-8f74-93e9a49a8ae7" containerName="keystone-db-sync"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.850083 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8978916-c429-4c70-8f74-93e9a49a8ae7" containerName="keystone-db-sync"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.850352 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8978916-c429-4c70-8f74-93e9a49a8ae7" containerName="keystone-db-sync"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.850963 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.854203 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.855083 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zpl"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.855727 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.856022 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.866531 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"]
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.867991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.878404 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cq5wz"]
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.892747 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"]
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931016 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931059 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxpsg\" (UniqueName: \"kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931156 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931183 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931407 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931478 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931510 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931531 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjcnx\" (UniqueName: \"kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931547 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931570 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:13 crc kubenswrapper[4779]: I0929 19:25:13.931609 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033338 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033401 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033432 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033454 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjcnx\" (UniqueName: \"kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033472 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033496 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033517 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033580 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxpsg\" (UniqueName: \"kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033603 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.033618 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.035071 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.035618 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.036118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.038544 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.041580 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.044957 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.057310 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.062044 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.081969 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.082298 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.089989 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxpsg\" (UniqueName: \"kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg\") pod \"keystone-bootstrap-cq5wz\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.097035 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjcnx\" (UniqueName: \"kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx\") pod \"dnsmasq-dns-5c5cc7c5ff-65hpw\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.099448 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.100898 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.115023 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.118796 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.119405 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-6zz6x"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.122363 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.122610 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.178211 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cq5wz"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.191089 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.247987 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.248067 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.248087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7hkf\" (UniqueName: \"kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.248108 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.248188 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.268076 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.280287 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.302794 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.302991 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.303610 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.306075 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.348706 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349296 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5qlh\" (UniqueName: \"kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349342 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349376 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349395 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349415 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7hkf\" (UniqueName: \"kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349438 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349452 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349473 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349550 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.349574 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.350885 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.351281 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.351539 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.364232 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.379382 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.379980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7hkf\" (UniqueName: \"kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf\") pod \"horizon-59998c8f8c-xtjqn\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.450884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.450937 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.450960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451007 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5qlh\" (UniqueName: \"kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451066 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451124 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451144 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451164 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451202 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.451223 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7bkb\" (UniqueName: \"kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.452539 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.455840 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.456923 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.457135 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.459724 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.461829 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.468719 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59998c8f8c-xtjqn"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.480006 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.487877 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5qlh\" (UniqueName: \"kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh\") pod \"ceilometer-0\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " pod="openstack/ceilometer-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.504344 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-xphf4"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.505641 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.516279 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.516543 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-snlst"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.524397 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.524863 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xphf4"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.544026 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.546094 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.548843 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.549248 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.549395 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mttwt"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.549486 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554480 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554548 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554614 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554637 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7bkb\" (UniqueName: \"kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554714 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554759 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554776 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qng7w\" (UniqueName: \"kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554847 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.554956 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.559402 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.559461 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.559698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.559829 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.562161 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.582297 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.585177 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7bkb\" (UniqueName: \"kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.585727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key\") pod \"horizon-5f7856dd6f-tbtnj\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.598887 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"]
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656724 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656793 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656818 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656871 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656908 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656930 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656963
4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.656999 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657035 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657058 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657075 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657115 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657131 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657181 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657197 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 
19:25:14.657219 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657239 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmvrf\" (UniqueName: \"kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qng7w\" (UniqueName: \"kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.657303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgts6\" (UniqueName: \"kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.658151 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.660360 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.661167 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.663970 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.674545 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qng7w\" (UniqueName: \"kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w\") pod \"placement-db-sync-xphf4\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") " pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.732048 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.750802 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f7856dd6f-tbtnj" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759007 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759090 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759222 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759262 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759310 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759359 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759381 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759427 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759469 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759516 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759545 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759601 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmvrf\" (UniqueName: \"kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.759668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgts6\" (UniqueName: \"kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.760168 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.760487 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.761295 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.761868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " 
pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.762059 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.762225 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.762804 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.762899 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.768036 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.768110 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.768335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.768729 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.778859 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgts6\" (UniqueName: \"kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.798425 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gmvrf\" (UniqueName: \"kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf\") pod \"dnsmasq-dns-8b5c85b87-pckt5\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") " pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.820016 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.862819 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xphf4" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.894349 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:25:14 crc kubenswrapper[4779]: I0929 19:25:14.906111 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.011667 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cq5wz"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.019176 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"] Sep 29 19:25:15 crc kubenswrapper[4779]: W0929 19:25:15.046729 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104158e9_b32f_4455_9e88_4adacf0dd01a.slice/crio-803031aae77aa686e4bba65078a206b09928917ee324708fbafc51019c6c4344 WatchSource:0}: Error finding container 803031aae77aa686e4bba65078a206b09928917ee324708fbafc51019c6c4344: Status 404 returned error can't find the container with id 803031aae77aa686e4bba65078a206b09928917ee324708fbafc51019c6c4344 Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.119996 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.135109 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.138044 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.146335 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.146369 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.185487 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269523 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269599 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269622 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269661 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269686 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269731 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269756 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgs45\" (UniqueName: \"kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.269778 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.293513 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.370933 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371248 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371295 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371312 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371374 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371402 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371442 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.371467 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgs45\" (UniqueName: \"kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45\") pod 
\"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.372232 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.373723 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.374850 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.375679 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.382072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.382500 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.386084 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.396325 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgs45\" (UniqueName: \"kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.429825 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.436953 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.459460 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xphf4"] Sep 29 19:25:15 crc kubenswrapper[4779]: E0929 19:25:15.563121 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104158e9_b32f_4455_9e88_4adacf0dd01a.slice/crio-conmon-80e5a83140c955e60b7b1138933a8c667c7d49eb42162d359879d73e48824ec4.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.579800 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f7856dd6f-tbtnj" event={"ID":"2a8f0557-90ad-401d-8bd2-702615ae52d9","Type":"ContainerStarted","Data":"8d7cdfb8d1bd05f6b5baf82ebcfb0b971a8482e6c689830e69ae96e375dad8eb"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.584270 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerStarted","Data":"f17533c21ca6c2c20c6d6e870ea9b1533f7fa3b97846a6b9152115c9370ff665"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.588739 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cq5wz" event={"ID":"3616fd00-99ce-4801-bdc8-90174bac56ba","Type":"ContainerStarted","Data":"ee63c651558ff4118a42199b7d391d96ea90fdca2e8705e528f0905396da6c3f"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.588798 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cq5wz" event={"ID":"3616fd00-99ce-4801-bdc8-90174bac56ba","Type":"ContainerStarted","Data":"fad94c68f0a94c3030121ef5d7888fdb44f94cabe48e52831b333e8c298595d9"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.604079 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xphf4" event={"ID":"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe","Type":"ContainerStarted","Data":"d1a10af839e0af4c4c2fd5839042e12441cfe88922744b62a6830001e9d829c6"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.609755 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cq5wz" podStartSLOduration=2.609735227 podStartE2EDuration="2.609735227s" podCreationTimestamp="2025-09-29 19:25:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:15.607035653 +0000 UTC m=+1026.491460763" watchObservedRunningTime="2025-09-29 19:25:15.609735227 +0000 UTC m=+1026.494160327" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.610598 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59998c8f8c-xtjqn" event={"ID":"259eb000-7d67-43dc-9736-6ae36eb29098","Type":"ContainerStarted","Data":"3b09fd77bc6bddc906742212a5c2ff921431c059faeeaa939c97338c42d5ac2f"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.612086 4779 generic.go:334] "Generic (PLEG): container finished" podID="104158e9-b32f-4455-9e88-4adacf0dd01a" containerID="80e5a83140c955e60b7b1138933a8c667c7d49eb42162d359879d73e48824ec4" exitCode=0 Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.612371 4779 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="dnsmasq-dns" containerID="cri-o://725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb" gracePeriod=10 Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.612597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw" event={"ID":"104158e9-b32f-4455-9e88-4adacf0dd01a","Type":"ContainerDied","Data":"80e5a83140c955e60b7b1138933a8c667c7d49eb42162d359879d73e48824ec4"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.612638 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw" event={"ID":"104158e9-b32f-4455-9e88-4adacf0dd01a","Type":"ContainerStarted","Data":"803031aae77aa686e4bba65078a206b09928917ee324708fbafc51019c6c4344"} Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.649236 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.657773 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.673931 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"] Sep 29 19:25:15 crc kubenswrapper[4779]: I0929 19:25:15.944260 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083469 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083522 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjcnx\" (UniqueName: \"kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083568 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083604 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083625 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.083667 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb\") pod \"104158e9-b32f-4455-9e88-4adacf0dd01a\" (UID: \"104158e9-b32f-4455-9e88-4adacf0dd01a\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.108493 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx" (OuterVolumeSpecName: "kube-api-access-hjcnx") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "kube-api-access-hjcnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.122517 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.126542 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config" (OuterVolumeSpecName: "config") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.140605 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.143140 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.165922 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "104158e9-b32f-4455-9e88-4adacf0dd01a" (UID: "104158e9-b32f-4455-9e88-4adacf0dd01a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193616 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193654 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjcnx\" (UniqueName: \"kubernetes.io/projected/104158e9-b32f-4455-9e88-4adacf0dd01a-kube-api-access-hjcnx\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193697 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193712 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193725 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.193736 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/104158e9-b32f-4455-9e88-4adacf0dd01a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.290969 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.297849 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:16 crc kubenswrapper[4779]: W0929 19:25:16.320538 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b8b65c9_68d0_462c_adb4_22ae32355bb6.slice/crio-faefa4187170e01aec517e89cd50acfdb6f05acd24329875b5028c9d3d60e30d WatchSource:0}: Error finding container faefa4187170e01aec517e89cd50acfdb6f05acd24329875b5028c9d3d60e30d: Status 404 returned error can't find the container with id faefa4187170e01aec517e89cd50acfdb6f05acd24329875b5028c9d3d60e30d Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.396717 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.396777 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.396828 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" 
(UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.396848 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.396891 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvfzq\" (UniqueName: \"kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.397318 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0\") pod \"fe3112c6-e01a-4173-b7d2-4a80b7779650\" (UID: \"fe3112c6-e01a-4173-b7d2-4a80b7779650\") " Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.403412 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq" (OuterVolumeSpecName: "kube-api-access-bvfzq") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "kube-api-access-bvfzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.453174 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.455049 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.461848 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.465244 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.478427 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config" (OuterVolumeSpecName: "config") pod "fe3112c6-e01a-4173-b7d2-4a80b7779650" (UID: "fe3112c6-e01a-4173-b7d2-4a80b7779650"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499440 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499476 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499509 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvfzq\" (UniqueName: \"kubernetes.io/projected/fe3112c6-e01a-4173-b7d2-4a80b7779650-kube-api-access-bvfzq\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499524 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499537 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.499547 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3112c6-e01a-4173-b7d2-4a80b7779650-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.650104 4779 generic.go:334] "Generic (PLEG): container finished" podID="5551aa76-af05-4686-b100-9b9f0664be70" containerID="7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5" exitCode=0 Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.650716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" event={"ID":"5551aa76-af05-4686-b100-9b9f0664be70","Type":"ContainerDied","Data":"7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5"} Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.650786 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" event={"ID":"5551aa76-af05-4686-b100-9b9f0664be70","Type":"ContainerStarted","Data":"4037f0a02db51d3034d10b53b7f90d49130fe477309fa95534242238225155b3"} Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.662207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw" event={"ID":"104158e9-b32f-4455-9e88-4adacf0dd01a","Type":"ContainerDied","Data":"803031aae77aa686e4bba65078a206b09928917ee324708fbafc51019c6c4344"} Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.662346 4779 scope.go:117] "RemoveContainer" containerID="80e5a83140c955e60b7b1138933a8c667c7d49eb42162d359879d73e48824ec4" Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.662393 4779 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.675603 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerStarted","Data":"faefa4187170e01aec517e89cd50acfdb6f05acd24329875b5028c9d3d60e30d"}
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.696015 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerStarted","Data":"b43e99ccee18ccc3f9c75bb29bfb8e853ddad99cc490b8633995e3ca4a03d199"}
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.696074 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerStarted","Data":"213f0608992298adcce44e841dd57be780ff48b5189ec762b2a86e1f4645cc08"}
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.750727 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerID="725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb" exitCode=0
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.752242 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.755598 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" event={"ID":"fe3112c6-e01a-4173-b7d2-4a80b7779650","Type":"ContainerDied","Data":"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"}
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.755823 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-h22ps" event={"ID":"fe3112c6-e01a-4173-b7d2-4a80b7779650","Type":"ContainerDied","Data":"cab61bba6794fc13b96fdc1409ba6add0663ca6c37ffa5ccd9bf752b2a880398"}
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.790061 4779 scope.go:117] "RemoveContainer" containerID="725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.847863 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"]
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.855501 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-65hpw"]
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.871007 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"]
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.877340 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-h22ps"]
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.900393 4779 scope.go:117] "RemoveContainer" containerID="2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.972630 4779 scope.go:117] "RemoveContainer" containerID="725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"
Sep 29 19:25:16 crc kubenswrapper[4779]: E0929 19:25:16.973023 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb\": container with ID starting with 725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb not found: ID does not exist" containerID="725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.973057 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb"} err="failed to get container status \"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb\": rpc error: code = NotFound desc = could not find container \"725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb\": container with ID starting with 725d38e5a9769b886fd1125e875021106089614da15c0f2d4f4b618399055eeb not found: ID does not exist"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.973078 4779 scope.go:117] "RemoveContainer" containerID="2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84"
Sep 29 19:25:16 crc kubenswrapper[4779]: E0929 19:25:16.973519 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84\": container with ID starting with 2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84 not found: ID does not exist" containerID="2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84"
Sep 29 19:25:16 crc kubenswrapper[4779]: I0929 19:25:16.973542 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84"} err="failed to get container status \"2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84\": rpc error: code = NotFound desc = could not find container \"2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84\": container with ID starting with 2312a39eecacc3e4bd478766f9a00f3df8677d4d1dcaa0a2ae31349e6e3bbd84 not found: ID does not exist"
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.783050 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104158e9-b32f-4455-9e88-4adacf0dd01a" path="/var/lib/kubelet/pods/104158e9-b32f-4455-9e88-4adacf0dd01a/volumes"
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.783893 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" path="/var/lib/kubelet/pods/fe3112c6-e01a-4173-b7d2-4a80b7779650/volumes"
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.784408 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerStarted","Data":"ebe3e1149816bdfb832b59a76ed9a53d5b40c95b140b0be6bc5e81336eae6cbd"}
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.784625 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5"
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.784645 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" event={"ID":"5551aa76-af05-4686-b100-9b9f0664be70","Type":"ContainerStarted","Data":"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"}
Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.784755 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerStarted","Data":"6c365d1714c8fa4b7b3767f2559f27d6ee99b4f97c2265d8a82870c7629c7e3e"}
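[editor's note] The "RemoveContainer" / "ContainerStatus from runtime service failed ... code = NotFound" pairs above are a benign race: the kubelet retries deleting a container that CRI-O has already removed, so the follow-up status lookup fails. A minimal sketch of how one might flag these pairs when reading this log offline; the file name kubelet.log is an assumption, not part of the artifact:

```python
import re

# Matches the kubelet's RemoveContainer scope lines and the NotFound errors
# that follow them; the \\" sequences are literal backslash-escaped quotes
# as they appear in the raw log.
REMOVE = re.compile(r'"RemoveContainer" containerID="([0-9a-f]{64})"')
NOTFOUND = re.compile(r'code = NotFound desc = could not find container \\"([0-9a-f]{64})\\"')

removed, raced = set(), set()
with open("kubelet.log") as fh:  # hypothetical path to this log
    for line in fh:
        if (m := REMOVE.search(line)):
            removed.add(m.group(1))
        elif (m := NOTFOUND.search(line)) and m.group(1) in removed:
            raced.add(m.group(1))

for cid in sorted(raced):
    print(f"benign delete race on container {cid[:12]}...")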
event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerStarted","Data":"6c365d1714c8fa4b7b3767f2559f27d6ee99b4f97c2265d8a82870c7629c7e3e"} Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.791902 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" podStartSLOduration=3.791885233 podStartE2EDuration="3.791885233s" podCreationTimestamp="2025-09-29 19:25:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:17.789464657 +0000 UTC m=+1028.673889757" watchObservedRunningTime="2025-09-29 19:25:17.791885233 +0000 UTC m=+1028.676310323" Sep 29 19:25:17 crc kubenswrapper[4779]: I0929 19:25:17.827551 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.8275329510000002 podStartE2EDuration="3.827532951s" podCreationTimestamp="2025-09-29 19:25:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:17.81684408 +0000 UTC m=+1028.701269180" watchObservedRunningTime="2025-09-29 19:25:17.827532951 +0000 UTC m=+1028.711958051" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.315475 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.345000 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"] Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395323 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"] Sep 29 19:25:18 crc kubenswrapper[4779]: E0929 19:25:18.395654 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="dnsmasq-dns" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395667 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="dnsmasq-dns" Sep 29 19:25:18 crc kubenswrapper[4779]: E0929 19:25:18.395681 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104158e9-b32f-4455-9e88-4adacf0dd01a" containerName="init" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395688 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="104158e9-b32f-4455-9e88-4adacf0dd01a" containerName="init" Sep 29 19:25:18 crc kubenswrapper[4779]: E0929 19:25:18.395704 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="init" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395710 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="init" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395880 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3112c6-e01a-4173-b7d2-4a80b7779650" containerName="dnsmasq-dns" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.395900 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="104158e9-b32f-4455-9e88-4adacf0dd01a" containerName="init" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.396787 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.412266 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"] Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.446620 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.450453 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6fmv\" (UniqueName: \"kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.450522 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.450548 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.450596 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.450635 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.461358 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.552015 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.552083 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6fmv\" (UniqueName: \"kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.552158 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key\") pod 
\"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.552181 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.552231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.553013 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.553607 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.553721 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.558549 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.572901 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6fmv\" (UniqueName: \"kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv\") pod \"horizon-6c8fdd957c-ft6jd\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") " pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.718949 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.794963 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerStarted","Data":"d1f7b1883c123b4c55039c83bb20002226027b09ffa31124b8786369ceb92d9c"} Sep 29 19:25:18 crc kubenswrapper[4779]: I0929 19:25:18.817518 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.81749959 podStartE2EDuration="4.81749959s" podCreationTimestamp="2025-09-29 19:25:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:18.813031749 +0000 UTC m=+1029.697456849" watchObservedRunningTime="2025-09-29 19:25:18.81749959 +0000 UTC m=+1029.701924690" Sep 29 19:25:19 crc kubenswrapper[4779]: I0929 19:25:19.837582 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-log" containerID="cri-o://b43e99ccee18ccc3f9c75bb29bfb8e853ddad99cc490b8633995e3ca4a03d199" gracePeriod=30 Sep 29 19:25:19 crc kubenswrapper[4779]: I0929 19:25:19.838843 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-log" containerID="cri-o://ebe3e1149816bdfb832b59a76ed9a53d5b40c95b140b0be6bc5e81336eae6cbd" gracePeriod=30 Sep 29 19:25:19 crc kubenswrapper[4779]: I0929 19:25:19.839115 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-httpd" containerID="cri-o://6c365d1714c8fa4b7b3767f2559f27d6ee99b4f97c2265d8a82870c7629c7e3e" gracePeriod=30 Sep 29 19:25:19 crc kubenswrapper[4779]: I0929 19:25:19.839686 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-httpd" containerID="cri-o://d1f7b1883c123b4c55039c83bb20002226027b09ffa31124b8786369ceb92d9c" gracePeriod=30 Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.847178 4779 generic.go:334] "Generic (PLEG): container finished" podID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerID="d1f7b1883c123b4c55039c83bb20002226027b09ffa31124b8786369ceb92d9c" exitCode=0 Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.847455 4779 generic.go:334] "Generic (PLEG): container finished" podID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerID="ebe3e1149816bdfb832b59a76ed9a53d5b40c95b140b0be6bc5e81336eae6cbd" exitCode=143 Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.847271 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerDied","Data":"d1f7b1883c123b4c55039c83bb20002226027b09ffa31124b8786369ceb92d9c"} Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.847521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerDied","Data":"ebe3e1149816bdfb832b59a76ed9a53d5b40c95b140b0be6bc5e81336eae6cbd"} Sep 29 19:25:20 crc 
Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.849484 4779 generic.go:334] "Generic (PLEG): container finished" podID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerID="b43e99ccee18ccc3f9c75bb29bfb8e853ddad99cc490b8633995e3ca4a03d199" exitCode=143
Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.849510 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerDied","Data":"6c365d1714c8fa4b7b3767f2559f27d6ee99b4f97c2265d8a82870c7629c7e3e"}
Sep 29 19:25:20 crc kubenswrapper[4779]: I0929 19:25:20.849537 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerDied","Data":"b43e99ccee18ccc3f9c75bb29bfb8e853ddad99cc490b8633995e3ca4a03d199"}
Sep 29 19:25:21 crc kubenswrapper[4779]: I0929 19:25:21.868263 4779 generic.go:334] "Generic (PLEG): container finished" podID="3616fd00-99ce-4801-bdc8-90174bac56ba" containerID="ee63c651558ff4118a42199b7d391d96ea90fdca2e8705e528f0905396da6c3f" exitCode=0
Sep 29 19:25:21 crc kubenswrapper[4779]: I0929 19:25:21.868537 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cq5wz" event={"ID":"3616fd00-99ce-4801-bdc8-90174bac56ba","Type":"ContainerDied","Data":"ee63c651558ff4118a42199b7d391d96ea90fdca2e8705e528f0905396da6c3f"}
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.371300 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5f48-account-create-vt6p4"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.372399 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.374432 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.379339 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5f48-account-create-vt6p4"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.523704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdqz4\" (UniqueName: \"kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4\") pod \"cinder-5f48-account-create-vt6p4\" (UID: \"35f86f4e-1390-4e24-bd8e-2a5cd9899d29\") " pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.588185 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-df0d-account-create-fhcpp"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.589343 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.592088 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.602524 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-df0d-account-create-fhcpp"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.626144 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdqz4\" (UniqueName: \"kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4\") pod \"cinder-5f48-account-create-vt6p4\" (UID: \"35f86f4e-1390-4e24-bd8e-2a5cd9899d29\") " pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.649567 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdqz4\" (UniqueName: \"kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4\") pod \"cinder-5f48-account-create-vt6p4\" (UID: \"35f86f4e-1390-4e24-bd8e-2a5cd9899d29\") " pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.689218 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.727653 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp7vj\" (UniqueName: \"kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj\") pod \"barbican-df0d-account-create-fhcpp\" (UID: \"1f88f50b-5057-4d90-b8d4-fdd4526eaf25\") " pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.772616 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f5e-account-create-2n6rt"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.773842 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5e-account-create-2n6rt"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.779812 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.797379 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f5e-account-create-2n6rt"]
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.829730 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp7vj\" (UniqueName: \"kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj\") pod \"barbican-df0d-account-create-fhcpp\" (UID: \"1f88f50b-5057-4d90-b8d4-fdd4526eaf25\") " pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.846812 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp7vj\" (UniqueName: \"kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj\") pod \"barbican-df0d-account-create-fhcpp\" (UID: \"1f88f50b-5057-4d90-b8d4-fdd4526eaf25\") " pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.915002 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-df0d-account-create-fhcpp"
Need to start a new one" pod="openstack/barbican-df0d-account-create-fhcpp" Sep 29 19:25:22 crc kubenswrapper[4779]: I0929 19:25:22.931064 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbl47\" (UniqueName: \"kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47\") pod \"neutron-5f5e-account-create-2n6rt\" (UID: \"b449307e-0969-471c-84c5-ce1a24b143e3\") " pod="openstack/neutron-5f5e-account-create-2n6rt" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.033358 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbl47\" (UniqueName: \"kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47\") pod \"neutron-5f5e-account-create-2n6rt\" (UID: \"b449307e-0969-471c-84c5-ce1a24b143e3\") " pod="openstack/neutron-5f5e-account-create-2n6rt" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.051936 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbl47\" (UniqueName: \"kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47\") pod \"neutron-5f5e-account-create-2n6rt\" (UID: \"b449307e-0969-471c-84c5-ce1a24b143e3\") " pod="openstack/neutron-5f5e-account-create-2n6rt" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.117773 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5e-account-create-2n6rt" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.364224 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"] Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.398264 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"] Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.399931 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.402107 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.422299 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"] Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.502543 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"] Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542415 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542495 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542685 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542873 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsd8l\" (UniqueName: \"kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542939 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.542977 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.543046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.546036 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-fc6fd7df6-btpzz"] Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.547523 
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.567780 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-fc6fd7df6-btpzz"]
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645164 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645342 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cd722c9-4e9b-4bad-a9fd-84529803680b-logs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-config-data\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645458 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-tls-certs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645540 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmghp\" (UniqueName: \"kubernetes.io/projected/6cd722c9-4e9b-4bad-a9fd-84529803680b-kube-api-access-qmghp\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.645705 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647097 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-secret-key\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647193 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsd8l\" (UniqueName: \"kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647282 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647368 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647402 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-combined-ca-bundle\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.647432 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-scripts\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.648414 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.648458 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.648548 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.649554 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86"
\"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.654743 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.663342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsd8l\" (UniqueName: \"kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.664061 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle\") pod \"horizon-6f7f5b6d48-8js86\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.728218 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748675 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-combined-ca-bundle\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-scripts\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748772 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cd722c9-4e9b-4bad-a9fd-84529803680b-logs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748795 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-config-data\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748812 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-tls-certs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748837 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmghp\" (UniqueName: \"kubernetes.io/projected/6cd722c9-4e9b-4bad-a9fd-84529803680b-kube-api-access-qmghp\") pod 
\"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.748897 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-secret-key\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.749882 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cd722c9-4e9b-4bad-a9fd-84529803680b-logs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.750062 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-scripts\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.750886 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cd722c9-4e9b-4bad-a9fd-84529803680b-config-data\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.752101 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-tls-certs\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.752993 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-horizon-secret-key\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.753507 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cd722c9-4e9b-4bad-a9fd-84529803680b-combined-ca-bundle\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.772232 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmghp\" (UniqueName: \"kubernetes.io/projected/6cd722c9-4e9b-4bad-a9fd-84529803680b-kube-api-access-qmghp\") pod \"horizon-fc6fd7df6-btpzz\" (UID: \"6cd722c9-4e9b-4bad-a9fd-84529803680b\") " pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:23 crc kubenswrapper[4779]: I0929 19:25:23.862194 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:24 crc kubenswrapper[4779]: I0929 19:25:24.914483 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" Sep 29 19:25:24 crc kubenswrapper[4779]: I0929 19:25:24.978575 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:25:24 crc kubenswrapper[4779]: I0929 19:25:24.979368 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="dnsmasq-dns" containerID="cri-o://501b263905e99102640de533c53dee9756db975bff779ac2b3171660964d3b35" gracePeriod=10 Sep 29 19:25:26 crc kubenswrapper[4779]: I0929 19:25:26.914548 4779 generic.go:334] "Generic (PLEG): container finished" podID="09860743-67e8-4aa3-9814-90b13f91317e" containerID="501b263905e99102640de533c53dee9756db975bff779ac2b3171660964d3b35" exitCode=0 Sep 29 19:25:26 crc kubenswrapper[4779]: I0929 19:25:26.914725 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" event={"ID":"09860743-67e8-4aa3-9814-90b13f91317e","Type":"ContainerDied","Data":"501b263905e99102640de533c53dee9756db975bff779ac2b3171660964d3b35"} Sep 29 19:25:27 crc kubenswrapper[4779]: I0929 19:25:27.888880 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Sep 29 19:25:31 crc kubenswrapper[4779]: E0929 19:25:31.610727 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 19:25:31 crc kubenswrapper[4779]: E0929 19:25:31.611247 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dch545h5bfhdch685h55ch659h5cchf9h5b7h9dhf6h9h88hd7h557h7fh55bh55ch599h668h58h64bh64dh565hd8hf5hb9h55dh56dh5fdh59bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g7hkf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-59998c8f8c-xtjqn_openstack(259eb000-7d67-43dc-9736-6ae36eb29098): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:25:31 crc kubenswrapper[4779]: E0929 19:25:31.614091 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-59998c8f8c-xtjqn" podUID="259eb000-7d67-43dc-9736-6ae36eb29098" Sep 29 19:25:32 crc kubenswrapper[4779]: E0929 19:25:32.004397 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 29 19:25:32 crc kubenswrapper[4779]: E0929 19:25:32.005010 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
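[editor's note] The pull failures above follow the standard kubelet pattern: an ErrImagePull on the attempt itself (here "context canceled" from the image service), then ImagePullBackOff while the kubelet backs off before retrying. A sketch that tallies failed pulls per image from this log (kubelet.log is an assumed path):

```python
import re
from collections import Counter

# "PullImage from image service failed" lines end with image="...";
# the err="..." field contains no image= token, so this match is safe.
IMG = re.compile(r'image="([^"]+)"')

fails = Counter()
with open("kubelet.log") as fh:  # hypothetical path to this log
    for line in fh:
        if "PullImage from image service failed" in line and (m := IMG.search(line)):
            fails[m.group(1)] += 1

for image, n in fails.most_common():
    print(f"{n}x pull failure for {image}")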
Sep 29 19:25:32 crc kubenswrapper[4779]: E0929 19:25:32.026120 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
Sep 29 19:25:32 crc kubenswrapper[4779]: E0929 19:25:32.026545 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndbh65ch58bh55h56h667h5b7h6bh678h58fh585h554h54bhf6h6ch599hd7h696h58bh575h577h56bh596h56hb6h647hd6h576h655h576hfh5bcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c7bkb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5f7856dd6f-tbtnj_openstack(2a8f0557-90ad-401d-8bd2-702615ae52d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 19:25:32 crc kubenswrapper[4779]: E0929 19:25:32.034053 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5f7856dd6f-tbtnj" podUID="2a8f0557-90ad-401d-8bd2-702615ae52d9"
Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.240574 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cq5wz"
Need to start a new one" pod="openstack/keystone-bootstrap-cq5wz" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318438 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxpsg\" (UniqueName: \"kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318580 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318624 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318639 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318687 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.318765 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle\") pod \"3616fd00-99ce-4801-bdc8-90174bac56ba\" (UID: \"3616fd00-99ce-4801-bdc8-90174bac56ba\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.330205 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.330928 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg" (OuterVolumeSpecName: "kube-api-access-fxpsg") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "kube-api-access-fxpsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.331595 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.334886 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts" (OuterVolumeSpecName: "scripts") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.360128 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data" (OuterVolumeSpecName: "config-data") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.370531 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3616fd00-99ce-4801-bdc8-90174bac56ba" (UID: "3616fd00-99ce-4801-bdc8-90174bac56ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421767 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxpsg\" (UniqueName: \"kubernetes.io/projected/3616fd00-99ce-4801-bdc8-90174bac56ba-kube-api-access-fxpsg\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421806 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421886 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421899 4779 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421910 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.421922 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3616fd00-99ce-4801-bdc8-90174bac56ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.494402 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59998c8f8c-xtjqn" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.603198 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.631503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data\") pod \"259eb000-7d67-43dc-9736-6ae36eb29098\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.631615 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key\") pod \"259eb000-7d67-43dc-9736-6ae36eb29098\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.631708 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs\") pod \"259eb000-7d67-43dc-9736-6ae36eb29098\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.631730 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7hkf\" (UniqueName: \"kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf\") pod \"259eb000-7d67-43dc-9736-6ae36eb29098\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.631760 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts\") pod \"259eb000-7d67-43dc-9736-6ae36eb29098\" (UID: \"259eb000-7d67-43dc-9736-6ae36eb29098\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.632915 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts" (OuterVolumeSpecName: "scripts") pod "259eb000-7d67-43dc-9736-6ae36eb29098" (UID: "259eb000-7d67-43dc-9736-6ae36eb29098"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.633030 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data" (OuterVolumeSpecName: "config-data") pod "259eb000-7d67-43dc-9736-6ae36eb29098" (UID: "259eb000-7d67-43dc-9736-6ae36eb29098"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.633750 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs" (OuterVolumeSpecName: "logs") pod "259eb000-7d67-43dc-9736-6ae36eb29098" (UID: "259eb000-7d67-43dc-9736-6ae36eb29098"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.644489 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "259eb000-7d67-43dc-9736-6ae36eb29098" (UID: "259eb000-7d67-43dc-9736-6ae36eb29098"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.644547 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf" (OuterVolumeSpecName: "kube-api-access-g7hkf") pod "259eb000-7d67-43dc-9736-6ae36eb29098" (UID: "259eb000-7d67-43dc-9736-6ae36eb29098"). InnerVolumeSpecName "kube-api-access-g7hkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.651391 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734021 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgs45\" (UniqueName: \"kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734722 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734741 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734763 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734817 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734846 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtt98\" (UniqueName: \"kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734867 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc 
kubenswrapper[4779]: I0929 19:25:32.734888 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.734955 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735010 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735033 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735057 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs\") pod \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\" (UID: \"1b8b65c9-68d0-462c-adb4-22ae32355bb6\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735110 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc\") pod \"09860743-67e8-4aa3-9814-90b13f91317e\" (UID: \"09860743-67e8-4aa3-9814-90b13f91317e\") " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735332 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs" (OuterVolumeSpecName: "logs") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735634 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735675 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735689 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/259eb000-7d67-43dc-9736-6ae36eb29098-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735699 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7hkf\" (UniqueName: \"kubernetes.io/projected/259eb000-7d67-43dc-9736-6ae36eb29098-kube-api-access-g7hkf\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735727 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735735 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/259eb000-7d67-43dc-9736-6ae36eb29098-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.735744 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/259eb000-7d67-43dc-9736-6ae36eb29098-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.740905 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts" (OuterVolumeSpecName: "scripts") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.742018 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.743177 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45" (OuterVolumeSpecName: "kube-api-access-rgs45") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "kube-api-access-rgs45". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.744772 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98" (OuterVolumeSpecName: "kube-api-access-dtt98") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "kube-api-access-dtt98". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.761234 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.783582 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.783787 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data" (OuterVolumeSpecName: "config-data") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.788406 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config" (OuterVolumeSpecName: "config") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.792761 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.798525 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1b8b65c9-68d0-462c-adb4-22ae32355bb6" (UID: "1b8b65c9-68d0-462c-adb4-22ae32355bb6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.802222 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.803174 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "09860743-67e8-4aa3-9814-90b13f91317e" (UID: "09860743-67e8-4aa3-9814-90b13f91317e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837168 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837215 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgs45\" (UniqueName: \"kubernetes.io/projected/1b8b65c9-68d0-462c-adb4-22ae32355bb6-kube-api-access-rgs45\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837232 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837245 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837260 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837288 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837301 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtt98\" (UniqueName: \"kubernetes.io/projected/09860743-67e8-4aa3-9814-90b13f91317e-kube-api-access-dtt98\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837334 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837346 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837357 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1b8b65c9-68d0-462c-adb4-22ae32355bb6-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837370 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837381 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09860743-67e8-4aa3-9814-90b13f91317e-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.837392 4779 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1b8b65c9-68d0-462c-adb4-22ae32355bb6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 
19:25:32.864296 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.939135 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.967638 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-fc6fd7df6-btpzz"] Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.982019 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5f48-account-create-vt6p4"] Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.988802 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-df0d-account-create-fhcpp"] Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.993521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59998c8f8c-xtjqn" event={"ID":"259eb000-7d67-43dc-9736-6ae36eb29098","Type":"ContainerDied","Data":"3b09fd77bc6bddc906742212a5c2ff921431c059faeeaa939c97338c42d5ac2f"} Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.993549 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59998c8f8c-xtjqn" Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.995547 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"] Sep 29 19:25:32 crc kubenswrapper[4779]: I0929 19:25:32.999637 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.008700 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.008722 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1b8b65c9-68d0-462c-adb4-22ae32355bb6","Type":"ContainerDied","Data":"faefa4187170e01aec517e89cd50acfdb6f05acd24329875b5028c9d3d60e30d"} Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.008766 4779 scope.go:117] "RemoveContainer" containerID="d1f7b1883c123b4c55039c83bb20002226027b09ffa31124b8786369ceb92d9c" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.015904 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cq5wz" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.017062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cq5wz" event={"ID":"3616fd00-99ce-4801-bdc8-90174bac56ba","Type":"ContainerDied","Data":"fad94c68f0a94c3030121ef5d7888fdb44f94cabe48e52831b333e8c298595d9"} Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.017103 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fad94c68f0a94c3030121ef5d7888fdb44f94cabe48e52831b333e8c298595d9" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.020286 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xphf4" event={"ID":"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe","Type":"ContainerStarted","Data":"f5e83d93913e6d73ab31cd72f16ff3e845ff2ee36708c4e494e4182e7f54e320"} Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.020821 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f5e-account-create-2n6rt"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.038342 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-xphf4" podStartSLOduration=2.402929176 podStartE2EDuration="19.038303811s" podCreationTimestamp="2025-09-29 19:25:14 +0000 UTC" firstStartedPulling="2025-09-29 19:25:15.472129462 +0000 UTC m=+1026.356554562" lastFinishedPulling="2025-09-29 19:25:32.107504097 +0000 UTC m=+1042.991929197" observedRunningTime="2025-09-29 19:25:33.038215498 +0000 UTC m=+1043.922640608" watchObservedRunningTime="2025-09-29 19:25:33.038303811 +0000 UTC m=+1043.922728911" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.039942 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" event={"ID":"09860743-67e8-4aa3-9814-90b13f91317e","Type":"ContainerDied","Data":"5b04d55fae55c80bef39a75a8e83ea45a9b90659b64abee32ec86f04bea1ceef"} Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.040377 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-zkpbk" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.112343 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.119211 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-zkpbk"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.126095 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.134744 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.169532 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:33 crc kubenswrapper[4779]: E0929 19:25:33.169975 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="dnsmasq-dns" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.169994 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="dnsmasq-dns" Sep 29 19:25:33 crc kubenswrapper[4779]: E0929 19:25:33.170011 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-httpd" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170017 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-httpd" Sep 29 19:25:33 crc kubenswrapper[4779]: E0929 19:25:33.170033 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="init" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170038 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="init" Sep 29 19:25:33 crc kubenswrapper[4779]: E0929 19:25:33.170067 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3616fd00-99ce-4801-bdc8-90174bac56ba" containerName="keystone-bootstrap" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170073 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3616fd00-99ce-4801-bdc8-90174bac56ba" containerName="keystone-bootstrap" Sep 29 19:25:33 crc kubenswrapper[4779]: E0929 19:25:33.170081 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-log" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170087 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-log" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170254 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3616fd00-99ce-4801-bdc8-90174bac56ba" containerName="keystone-bootstrap" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170274 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-log" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170284 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" containerName="glance-httpd" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.170295 4779 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="09860743-67e8-4aa3-9814-90b13f91317e" containerName="dnsmasq-dns" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.171245 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.178028 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.180166 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.195154 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.204554 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-59998c8f8c-xtjqn"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.223173 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.245619 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.245844 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.245880 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.245979 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.246046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.246068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc 
kubenswrapper[4779]: I0929 19:25:33.246111 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.246282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348205 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348547 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348741 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348892 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348936 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.348975 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.355041 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.355147 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.355565 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.363668 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.364135 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.364342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.373515 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.376920 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.380491 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cq5wz"] Sep 29 19:25:33 crc 
kubenswrapper[4779]: I0929 19:25:33.394364 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.415531 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cq5wz"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.469505 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-nrnv2"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.480707 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.480911 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nrnv2"] Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.482281 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.482399 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.483579 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.486753 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zpl" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.502001 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:33 crc kubenswrapper[4779]: W0929 19:25:33.504339 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35f86f4e_1390_4e24_bd8e_2a5cd9899d29.slice/crio-bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69 WatchSource:0}: Error finding container bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69: Status 404 returned error can't find the container with id bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69 Sep 29 19:25:33 crc kubenswrapper[4779]: W0929 19:25:33.515535 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cd722c9_4e9b_4bad_a9fd_84529803680b.slice/crio-738cd1f6790ee89393458550f844fec2f3b52411b6d77de3cdc49e9c5b894f4e WatchSource:0}: Error finding container 738cd1f6790ee89393458550f844fec2f3b52411b6d77de3cdc49e9c5b894f4e: Status 404 returned error can't find the container with id 738cd1f6790ee89393458550f844fec2f3b52411b6d77de3cdc49e9c5b894f4e Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.526078 4779 scope.go:117] "RemoveContainer" containerID="ebe3e1149816bdfb832b59a76ed9a53d5b40c95b140b0be6bc5e81336eae6cbd" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552284 
4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552371 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552403 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.552426 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pk8j\" (UniqueName: \"kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.564366 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f7856dd6f-tbtnj" Sep 29 19:25:33 crc kubenswrapper[4779]: I0929 19:25:33.572969 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.610243 4779 scope.go:117] "RemoveContainer" containerID="501b263905e99102640de533c53dee9756db975bff779ac2b3171660964d3b35" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.652923 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653021 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7bkb\" (UniqueName: \"kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb\") pod \"2a8f0557-90ad-401d-8bd2-702615ae52d9\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653121 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653180 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653196 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653233 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data\") pod \"2a8f0557-90ad-401d-8bd2-702615ae52d9\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653248 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs\") pod \"2a8f0557-90ad-401d-8bd2-702615ae52d9\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653272 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts\") pod \"2a8f0557-90ad-401d-8bd2-702615ae52d9\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653310 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653367 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgts6\" (UniqueName: 
\"kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653446 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key\") pod \"2a8f0557-90ad-401d-8bd2-702615ae52d9\" (UID: \"2a8f0557-90ad-401d-8bd2-702615ae52d9\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653470 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653522 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs\") pod \"016ec3ae-ba64-4283-9dd1-1387254c5501\" (UID: \"016ec3ae-ba64-4283-9dd1-1387254c5501\") " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653745 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653767 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pk8j\" (UniqueName: \"kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653813 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653862 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653881 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.653928 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.654358 4779 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.657641 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.657910 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.658143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs" (OuterVolumeSpecName: "logs") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.659985 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts" (OuterVolumeSpecName: "scripts") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.661768 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2a8f0557-90ad-401d-8bd2-702615ae52d9" (UID: "2a8f0557-90ad-401d-8bd2-702615ae52d9"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.662400 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.662589 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.663035 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs" (OuterVolumeSpecName: "logs") pod "2a8f0557-90ad-401d-8bd2-702615ae52d9" (UID: "2a8f0557-90ad-401d-8bd2-702615ae52d9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.663603 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts" (OuterVolumeSpecName: "scripts") pod "2a8f0557-90ad-401d-8bd2-702615ae52d9" (UID: "2a8f0557-90ad-401d-8bd2-702615ae52d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.663757 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data" (OuterVolumeSpecName: "config-data") pod "2a8f0557-90ad-401d-8bd2-702615ae52d9" (UID: "2a8f0557-90ad-401d-8bd2-702615ae52d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.664030 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6" (OuterVolumeSpecName: "kube-api-access-tgts6") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "kube-api-access-tgts6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.665035 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb" (OuterVolumeSpecName: "kube-api-access-c7bkb") pod "2a8f0557-90ad-401d-8bd2-702615ae52d9" (UID: "2a8f0557-90ad-401d-8bd2-702615ae52d9"). InnerVolumeSpecName "kube-api-access-c7bkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.668741 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.670946 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.671954 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pk8j\" (UniqueName: \"kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j\") pod \"keystone-bootstrap-nrnv2\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.679136 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.680930 4779 scope.go:117] "RemoveContainer" containerID="4251d7f94f1c1eae6eaa621a7bbdd14cf2221341173d7f4c187d74f6aab3cf09" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.704508 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data" (OuterVolumeSpecName: "config-data") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.712524 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "016ec3ae-ba64-4283-9dd1-1387254c5501" (UID: "016ec3ae-ba64-4283-9dd1-1387254c5501"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757061 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757109 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7bkb\" (UniqueName: \"kubernetes.io/projected/2a8f0557-90ad-401d-8bd2-702615ae52d9-kube-api-access-c7bkb\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757122 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757131 4779 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757143 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757151 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757158 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8f0557-90ad-401d-8bd2-702615ae52d9-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757166 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a8f0557-90ad-401d-8bd2-702615ae52d9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757174 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757181 4779 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-tgts6\" (UniqueName: \"kubernetes.io/projected/016ec3ae-ba64-4283-9dd1-1387254c5501-kube-api-access-tgts6\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757190 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2a8f0557-90ad-401d-8bd2-702615ae52d9-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757198 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/016ec3ae-ba64-4283-9dd1-1387254c5501-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.757205 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/016ec3ae-ba64-4283-9dd1-1387254c5501-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.779741 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09860743-67e8-4aa3-9814-90b13f91317e" path="/var/lib/kubelet/pods/09860743-67e8-4aa3-9814-90b13f91317e/volumes" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.780490 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b8b65c9-68d0-462c-adb4-22ae32355bb6" path="/var/lib/kubelet/pods/1b8b65c9-68d0-462c-adb4-22ae32355bb6/volumes" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.781388 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="259eb000-7d67-43dc-9736-6ae36eb29098" path="/var/lib/kubelet/pods/259eb000-7d67-43dc-9736-6ae36eb29098/volumes" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.782136 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3616fd00-99ce-4801-bdc8-90174bac56ba" path="/var/lib/kubelet/pods/3616fd00-99ce-4801-bdc8-90174bac56ba/volumes" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.797021 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.858963 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:33.865780 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.055127 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fc6fd7df6-btpzz" event={"ID":"6cd722c9-4e9b-4bad-a9fd-84529803680b","Type":"ContainerStarted","Data":"738cd1f6790ee89393458550f844fec2f3b52411b6d77de3cdc49e9c5b894f4e"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.056963 4779 generic.go:334] "Generic (PLEG): container finished" podID="1f88f50b-5057-4d90-b8d4-fdd4526eaf25" containerID="8d670c253599dc0e5bc29861aa4be93aaf4b0e553af70d422a3abb4a91d29125" exitCode=0
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.057018 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df0d-account-create-fhcpp" event={"ID":"1f88f50b-5057-4d90-b8d4-fdd4526eaf25","Type":"ContainerDied","Data":"8d670c253599dc0e5bc29861aa4be93aaf4b0e553af70d422a3abb4a91d29125"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.057041 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df0d-account-create-fhcpp" event={"ID":"1f88f50b-5057-4d90-b8d4-fdd4526eaf25","Type":"ContainerStarted","Data":"fd936b7292a1c301730d54f6b74ca76db2285128d9be6871422720d359667a45"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.058951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f7856dd6f-tbtnj" event={"ID":"2a8f0557-90ad-401d-8bd2-702615ae52d9","Type":"ContainerDied","Data":"8d7cdfb8d1bd05f6b5baf82ebcfb0b971a8482e6c689830e69ae96e375dad8eb"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.058958 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f7856dd6f-tbtnj"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.065071 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerStarted","Data":"549a85bd0aea730e66cd62f4effe60d79b4e0b799a081a8d1cd429c4cc09c2b1"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.066828 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerStarted","Data":"454ebba1ca13cbd1655115b4f6b9309da8ad7b7b164c81456af654ec81f1733e"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.073588 4779 generic.go:334] "Generic (PLEG): container finished" podID="35f86f4e-1390-4e24-bd8e-2a5cd9899d29" containerID="c3b5560342e39c7e34e52d23e1dad9c38471e5cd37ab0775116e04fa32954ec5" exitCode=0
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.073768 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5f48-account-create-vt6p4" event={"ID":"35f86f4e-1390-4e24-bd8e-2a5cd9899d29","Type":"ContainerDied","Data":"c3b5560342e39c7e34e52d23e1dad9c38471e5cd37ab0775116e04fa32954ec5"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.073809 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5f48-account-create-vt6p4" event={"ID":"35f86f4e-1390-4e24-bd8e-2a5cd9899d29","Type":"ContainerStarted","Data":"bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.076629 4779 generic.go:334] "Generic (PLEG): container finished" podID="b449307e-0969-471c-84c5-ce1a24b143e3" containerID="921e448cc9c870a1beb00d75c5b3de36f80f2aa45237cbacb26546bf2fe92438" exitCode=0
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.076668 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5e-account-create-2n6rt" event={"ID":"b449307e-0969-471c-84c5-ce1a24b143e3","Type":"ContainerDied","Data":"921e448cc9c870a1beb00d75c5b3de36f80f2aa45237cbacb26546bf2fe92438"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.076687 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5e-account-create-2n6rt" event={"ID":"b449307e-0969-471c-84c5-ce1a24b143e3","Type":"ContainerStarted","Data":"69a9116efabfb8b21c791fdbd85663fac99bc11fbef1477192d8492e199da10f"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.078655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerStarted","Data":"411d397596b00ed8248c8e9e33024f632fc074c50d520f1bc0b2dfd34db9dc6f"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.080935 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"016ec3ae-ba64-4283-9dd1-1387254c5501","Type":"ContainerDied","Data":"213f0608992298adcce44e841dd57be780ff48b5189ec762b2a86e1f4645cc08"}
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.080961 4779 scope.go:117] "RemoveContainer" containerID="6c365d1714c8fa4b7b3767f2559f27d6ee99b4f97c2265d8a82870c7629c7e3e"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.080974 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.254048 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"]
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.264558 4779 scope.go:117] "RemoveContainer" containerID="b43e99ccee18ccc3f9c75bb29bfb8e853ddad99cc490b8633995e3ca4a03d199"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.282187 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5f7856dd6f-tbtnj"]
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.309710 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.323385 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.342640 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:34 crc kubenswrapper[4779]: E0929 19:25:34.343388 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-httpd"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.343430 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-httpd"
Sep 29 19:25:34 crc kubenswrapper[4779]: E0929 19:25:34.343452 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-log"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.343461 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-log"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.344098 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-httpd"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.344123 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" containerName="glance-log"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.349021 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.349124 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.354253 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.354366 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483162 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483255 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483368 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483446 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483519 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz46f\" (UniqueName: \"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0"
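The "SyncLoop (PLEG): event for pod" entries above carry a small JSON payload after event=. A sketch (the regex and variable names are mine) that extracts pod, event type, and container/sandbox ID from one such line:

    import json
    import re

    # The payload after "event=" is plain JSON, so it can be parsed directly.
    pat = re.compile(r'pod="(?P<pod>[^"]+)" event=(?P<event>\{.*?\})')
    line = ('Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.055127 4779 '
            'kubelet.go:2453] "SyncLoop (PLEG): event for pod" '
            'pod="openstack/horizon-fc6fd7df6-btpzz" '
            'event={"ID":"6cd722c9-4e9b-4bad-a9fd-84529803680b","Type":"ContainerStarted",'
            '"Data":"738cd1f6790ee89393458550f844fec2f3b52411b6d77de3cdc49e9c5b894f4e"}')
    m = pat.search(line)
    if m:
        ev = json.loads(m.group("event"))
        print(m.group("pod"), ev["Type"], ev["Data"])
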
\"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.483607 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585442 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585482 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585536 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585558 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585615 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz46f\" (UniqueName: \"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.585691 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.588610 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.588656 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.588874 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.592446 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.592823 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.594073 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.594625 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.611072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz46f\" (UniqueName: \"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") pod \"glance-default-external-api-0\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.631717 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: 
\"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.639753 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.664998 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:25:34 crc kubenswrapper[4779]: W0929 19:25:34.677983 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81b272b7_f428_4787_b48e_3afcf7e4c8d0.slice/crio-70bfe4939bac3b8923ef36f4c6f320830653726acda4cfb88516183ed8be3a11 WatchSource:0}: Error finding container 70bfe4939bac3b8923ef36f4c6f320830653726acda4cfb88516183ed8be3a11: Status 404 returned error can't find the container with id 70bfe4939bac3b8923ef36f4c6f320830653726acda4cfb88516183ed8be3a11 Sep 29 19:25:34 crc kubenswrapper[4779]: I0929 19:25:34.683545 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nrnv2"] Sep 29 19:25:34 crc kubenswrapper[4779]: W0929 19:25:34.694093 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a64e434_cdce_47ed_9c44_ab5109920fc7.slice/crio-75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de WatchSource:0}: Error finding container 75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de: Status 404 returned error can't find the container with id 75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.106303 4779 generic.go:334] "Generic (PLEG): container finished" podID="48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" containerID="f5e83d93913e6d73ab31cd72f16ff3e845ff2ee36708c4e494e4182e7f54e320" exitCode=0 Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.106640 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xphf4" event={"ID":"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe","Type":"ContainerDied","Data":"f5e83d93913e6d73ab31cd72f16ff3e845ff2ee36708c4e494e4182e7f54e320"} Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.131122 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerStarted","Data":"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece"} Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.131162 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerStarted","Data":"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1"} Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.139576 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fc6fd7df6-btpzz" event={"ID":"6cd722c9-4e9b-4bad-a9fd-84529803680b","Type":"ContainerStarted","Data":"502ccc6c29ce389e4ed97b2d5bda74d48401d22f269d5ef953ff95b02f0e7365"} Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.139702 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fc6fd7df6-btpzz" event={"ID":"6cd722c9-4e9b-4bad-a9fd-84529803680b","Type":"ContainerStarted","Data":"b43e3e5c70a4cc7cb8b3924f18fb0fe2e9048cfd9b5406c0f0cd1a8d86823c35"} Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.161646 4779 
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.165285 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6f7f5b6d48-8js86" podStartSLOduration=11.399437723 podStartE2EDuration="12.165265699s" podCreationTimestamp="2025-09-29 19:25:23 +0000 UTC" firstStartedPulling="2025-09-29 19:25:33.526233254 +0000 UTC m=+1044.410658354" lastFinishedPulling="2025-09-29 19:25:34.29206123 +0000 UTC m=+1045.176486330" observedRunningTime="2025-09-29 19:25:35.150973681 +0000 UTC m=+1046.035398781" watchObservedRunningTime="2025-09-29 19:25:35.165265699 +0000 UTC m=+1046.049690789"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.166249 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerStarted","Data":"2d7978690ffe9b5b89ea08a4a5420238484738ad683c853f66a8429d474249a3"}
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.166297 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerStarted","Data":"28c041be6ca4d08a38ed85b2b19f3ce8abf070551e701d0eba1bbea6f712dd6c"}
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.166345 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6c8fdd957c-ft6jd" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon" containerID="cri-o://2d7978690ffe9b5b89ea08a4a5420238484738ad683c853f66a8429d474249a3" gracePeriod=30
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.166307 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6c8fdd957c-ft6jd" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon-log" containerID="cri-o://28c041be6ca4d08a38ed85b2b19f3ce8abf070551e701d0eba1bbea6f712dd6c" gracePeriod=30
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.175097 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerStarted","Data":"70bfe4939bac3b8923ef36f4c6f320830653726acda4cfb88516183ed8be3a11"}
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.179139 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-fc6fd7df6-btpzz" podStartSLOduration=11.529999527 podStartE2EDuration="12.179123415s" podCreationTimestamp="2025-09-29 19:25:23 +0000 UTC" firstStartedPulling="2025-09-29 19:25:33.526948484 +0000 UTC m=+1044.411373584" lastFinishedPulling="2025-09-29 19:25:34.176072372 +0000 UTC m=+1045.060497472" observedRunningTime="2025-09-29 19:25:35.167961132 +0000 UTC m=+1046.052386232" watchObservedRunningTime="2025-09-29 19:25:35.179123415 +0000 UTC m=+1046.063548505"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.181854 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrnv2" event={"ID":"2a64e434-cdce-47ed-9c44-ab5109920fc7","Type":"ContainerStarted","Data":"aeef032f15bbbc9a9afdbe35cf7bcd63a67cdbcb686d8bab9bb709083cb9ae8a"}
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.181895 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrnv2" event={"ID":"2a64e434-cdce-47ed-9c44-ab5109920fc7","Type":"ContainerStarted","Data":"75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de"}
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.193772 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6c8fdd957c-ft6jd" podStartSLOduration=16.466812502 podStartE2EDuration="17.193751802s" podCreationTimestamp="2025-09-29 19:25:18 +0000 UTC" firstStartedPulling="2025-09-29 19:25:33.587638411 +0000 UTC m=+1044.472063521" lastFinishedPulling="2025-09-29 19:25:34.314577711 +0000 UTC m=+1045.199002821" observedRunningTime="2025-09-29 19:25:35.188840329 +0000 UTC m=+1046.073265429" watchObservedRunningTime="2025-09-29 19:25:35.193751802 +0000 UTC m=+1046.078176902"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.549828 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.597008 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-nrnv2" podStartSLOduration=2.596991377 podStartE2EDuration="2.596991377s" podCreationTimestamp="2025-09-29 19:25:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:35.219650505 +0000 UTC m=+1046.104075605" watchObservedRunningTime="2025-09-29 19:25:35.596991377 +0000 UTC m=+1046.481416477"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.613675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp7vj\" (UniqueName: \"kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj\") pod \"1f88f50b-5057-4d90-b8d4-fdd4526eaf25\" (UID: \"1f88f50b-5057-4d90-b8d4-fdd4526eaf25\") "
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.618434 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj" (OuterVolumeSpecName: "kube-api-access-dp7vj") pod "1f88f50b-5057-4d90-b8d4-fdd4526eaf25" (UID: "1f88f50b-5057-4d90-b8d4-fdd4526eaf25"). InnerVolumeSpecName "kube-api-access-dp7vj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.689601 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.701229 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5e-account-create-2n6rt"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.718464 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp7vj\" (UniqueName: \"kubernetes.io/projected/1f88f50b-5057-4d90-b8d4-fdd4526eaf25-kube-api-access-dp7vj\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.816481 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="016ec3ae-ba64-4283-9dd1-1387254c5501" path="/var/lib/kubelet/pods/016ec3ae-ba64-4283-9dd1-1387254c5501/volumes"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.817239 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a8f0557-90ad-401d-8bd2-702615ae52d9" path="/var/lib/kubelet/pods/2a8f0557-90ad-401d-8bd2-702615ae52d9/volumes"
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.819659 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbl47\" (UniqueName: \"kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47\") pod \"b449307e-0969-471c-84c5-ce1a24b143e3\" (UID: \"b449307e-0969-471c-84c5-ce1a24b143e3\") "
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.819758 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdqz4\" (UniqueName: \"kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4\") pod \"35f86f4e-1390-4e24-bd8e-2a5cd9899d29\" (UID: \"35f86f4e-1390-4e24-bd8e-2a5cd9899d29\") "
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.852803 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47" (OuterVolumeSpecName: "kube-api-access-wbl47") pod "b449307e-0969-471c-84c5-ce1a24b143e3" (UID: "b449307e-0969-471c-84c5-ce1a24b143e3"). InnerVolumeSpecName "kube-api-access-wbl47". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.861657 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4" (OuterVolumeSpecName: "kube-api-access-qdqz4") pod "35f86f4e-1390-4e24-bd8e-2a5cd9899d29" (UID: "35f86f4e-1390-4e24-bd8e-2a5cd9899d29"). InnerVolumeSpecName "kube-api-access-qdqz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.921426 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdqz4\" (UniqueName: \"kubernetes.io/projected/35f86f4e-1390-4e24-bd8e-2a5cd9899d29-kube-api-access-qdqz4\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:35 crc kubenswrapper[4779]: I0929 19:25:35.921452 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbl47\" (UniqueName: \"kubernetes.io/projected/b449307e-0969-471c-84c5-ce1a24b143e3-kube-api-access-wbl47\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.204704 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f5e-account-create-2n6rt" event={"ID":"b449307e-0969-471c-84c5-ce1a24b143e3","Type":"ContainerDied","Data":"69a9116efabfb8b21c791fdbd85663fac99bc11fbef1477192d8492e199da10f"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.205026 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69a9116efabfb8b21c791fdbd85663fac99bc11fbef1477192d8492e199da10f"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.205081 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f5e-account-create-2n6rt"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.218079 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerStarted","Data":"84cac98c4a76840282215ff3ab3b592f6ac47af3eaa67cd43f5a9e73129b5d79"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.220007 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5f48-account-create-vt6p4" event={"ID":"35f86f4e-1390-4e24-bd8e-2a5cd9899d29","Type":"ContainerDied","Data":"bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.220031 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbc226eca98d15120961c8e80d4965a0d9f8d170d01f06fc67909a81e7328d69"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.220083 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5f48-account-create-vt6p4"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.223891 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerStarted","Data":"a5bfae5491dc68369f3b0bb4f5458e92356ba49c7e40f4ad60f3cf763e8fed7a"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.223931 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerStarted","Data":"53fa5a67caa0ea269793cad27d822aa72b62e50c7a5a2e12eb8fe2ece55e4d64"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.225818 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df0d-account-create-fhcpp" event={"ID":"1f88f50b-5057-4d90-b8d4-fdd4526eaf25","Type":"ContainerDied","Data":"fd936b7292a1c301730d54f6b74ca76db2285128d9be6871422720d359667a45"}
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.225859 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd936b7292a1c301730d54f6b74ca76db2285128d9be6871422720d359667a45"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.225916 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-df0d-account-create-fhcpp"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.495551 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.535152 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data\") pod \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") "
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.535220 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts\") pod \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") "
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.535499 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qng7w\" (UniqueName: \"kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w\") pod \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") "
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.535550 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle\") pod \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") "
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.535586 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs\") pod \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\" (UID: \"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe\") "
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.536475 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs" (OuterVolumeSpecName: "logs") pod "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" (UID: "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.543690 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts" (OuterVolumeSpecName: "scripts") pod "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" (UID: "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.549472 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w" (OuterVolumeSpecName: "kube-api-access-qng7w") pod "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" (UID: "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe"). InnerVolumeSpecName "kube-api-access-qng7w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.565791 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" (UID: "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.568792 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data" (OuterVolumeSpecName: "config-data") pod "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" (UID: "48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.637106 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.637287 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qng7w\" (UniqueName: \"kubernetes.io/projected/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-kube-api-access-qng7w\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.637442 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.637507 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-logs\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:36 crc kubenswrapper[4779]: I0929 19:25:36.637587 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.226893 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7846dbb58d-4ftsw"]
Sep 29 19:25:37 crc kubenswrapper[4779]: E0929 19:25:37.227622 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" containerName="placement-db-sync"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227646 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" containerName="placement-db-sync"
Sep 29 19:25:37 crc kubenswrapper[4779]: E0929 19:25:37.227659 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b449307e-0969-471c-84c5-ce1a24b143e3" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227666 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b449307e-0969-471c-84c5-ce1a24b143e3" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: E0929 19:25:37.227690 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f86f4e-1390-4e24-bd8e-2a5cd9899d29" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227698 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f86f4e-1390-4e24-bd8e-2a5cd9899d29" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: E0929 19:25:37.227719 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f88f50b-5057-4d90-b8d4-fdd4526eaf25" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227727 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f88f50b-5057-4d90-b8d4-fdd4526eaf25" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227923 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f86f4e-1390-4e24-bd8e-2a5cd9899d29" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227951 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f88f50b-5057-4d90-b8d4-fdd4526eaf25" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227978 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b449307e-0969-471c-84c5-ce1a24b143e3" containerName="mariadb-account-create"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.227991 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" containerName="placement-db-sync"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.229141 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.232187 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.233290 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.243742 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7846dbb58d-4ftsw"]
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.255300 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerStarted","Data":"89810bd9c684160b3b3b03fb5ff83db2b6c73920aef64a82ff698aa35e4760b4"}
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.279457 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerStarted","Data":"efe227d05a85321bfef2305a94bd917d7139d78388dcc95dc3a82ee9ddc1be7c"}
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.288258 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xphf4" event={"ID":"48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe","Type":"ContainerDied","Data":"d1a10af839e0af4c4c2fd5839042e12441cfe88922744b62a6830001e9d829c6"}
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.288302 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1a10af839e0af4c4c2fd5839042e12441cfe88922744b62a6830001e9d829c6"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.288423 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xphf4"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.296993 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.296975378 podStartE2EDuration="4.296975378s" podCreationTimestamp="2025-09-29 19:25:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:37.290663156 +0000 UTC m=+1048.175088276" watchObservedRunningTime="2025-09-29 19:25:37.296975378 +0000 UTC m=+1048.181400478"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.320906 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.320887367 podStartE2EDuration="3.320887367s" podCreationTimestamp="2025-09-29 19:25:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:37.320241639 +0000 UTC m=+1048.204666749" watchObservedRunningTime="2025-09-29 19:25:37.320887367 +0000 UTC m=+1048.205312467"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.358593 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-config-data\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.358816 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-kube-api-access-r77fs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.358923 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-internal-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.359046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-scripts\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.359077 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-public-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.359112 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-logs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.359156 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-combined-ca-bundle\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.461383 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-scripts\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462432 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-public-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462453 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-logs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462516 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-combined-ca-bundle\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462542 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-config-data\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462617 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-kube-api-access-r77fs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.462707 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-internal-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.464693 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-logs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.468698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-combined-ca-bundle\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.470973 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-config-data\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.470991 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-scripts\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.471903 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-public-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.472566 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-internal-tls-certs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.493701 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/7b7c84ea-91fb-467d-b935-6f5034a2d7fb-kube-api-access-r77fs\") pod \"placement-7846dbb58d-4ftsw\" (UID: \"7b7c84ea-91fb-467d-b935-6f5034a2d7fb\") " pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.562663 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.575734 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-wnn5c"]
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.576752 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.582034 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.582093 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.582124 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-s6cd5"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.592131 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-wnn5c"]
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.665763 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.665842 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4h4h\" (UniqueName: \"kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.665878 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.665981 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.666013 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.666042 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.766943 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.766988 4779 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.767015 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.767068 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.767101 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4h4h\" (UniqueName: \"kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.767122 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.767661 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.775211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.787574 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4h4h\" (UniqueName: \"kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.794679 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.798424 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data\") pod \"cinder-db-sync-wnn5c\" (UID: 
\"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.799088 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts\") pod \"cinder-db-sync-wnn5c\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.903972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.989574 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-2kkwn"] Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.991043 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.992992 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8lt8h" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.994461 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 19:25:37 crc kubenswrapper[4779]: I0929 19:25:37.997662 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2kkwn"] Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.074399 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.074680 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.074768 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vgc5\" (UniqueName: \"kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.074911 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6rvdb"] Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.075954 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.079603 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.079897 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-7jktj" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.080079 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.082935 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6rvdb"] Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176167 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176240 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46hzq\" (UniqueName: \"kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176293 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176333 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vgc5\" (UniqueName: \"kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.176354 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.180969 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.234105 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.236897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vgc5\" (UniqueName: \"kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5\") pod \"barbican-db-sync-2kkwn\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") " pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.278283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46hzq\" (UniqueName: \"kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.278743 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.278816 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.283278 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.284805 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.300574 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46hzq\" (UniqueName: \"kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq\") pod \"neutron-db-sync-6rvdb\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.312898 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2kkwn" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.403003 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:25:38 crc kubenswrapper[4779]: I0929 19:25:38.720123 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6c8fdd957c-ft6jd" Sep 29 19:25:40 crc kubenswrapper[4779]: I0929 19:25:40.322408 4779 generic.go:334] "Generic (PLEG): container finished" podID="2a64e434-cdce-47ed-9c44-ab5109920fc7" containerID="aeef032f15bbbc9a9afdbe35cf7bcd63a67cdbcb686d8bab9bb709083cb9ae8a" exitCode=0 Sep 29 19:25:40 crc kubenswrapper[4779]: I0929 19:25:40.322455 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrnv2" event={"ID":"2a64e434-cdce-47ed-9c44-ab5109920fc7","Type":"ContainerDied","Data":"aeef032f15bbbc9a9afdbe35cf7bcd63a67cdbcb686d8bab9bb709083cb9ae8a"} Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.326985 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7846dbb58d-4ftsw"] Sep 29 19:25:41 crc kubenswrapper[4779]: W0929 19:25:41.333300 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4c19704_2fd1_4d08_a947_f80d1d84f543.slice/crio-7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c WatchSource:0}: Error finding container 7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c: Status 404 returned error can't find the container with id 7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.333493 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6rvdb"] Sep 29 19:25:41 crc kubenswrapper[4779]: W0929 19:25:41.333620 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b7c84ea_91fb_467d_b935_6f5034a2d7fb.slice/crio-2fc4c57910c2c1d6fa9ce2eb2785ec02aa53d52636892055b385bba772b4cc82 WatchSource:0}: Error finding container 2fc4c57910c2c1d6fa9ce2eb2785ec02aa53d52636892055b385bba772b4cc82: Status 404 returned error can't find the container with id 2fc4c57910c2c1d6fa9ce2eb2785ec02aa53d52636892055b385bba772b4cc82 Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.341621 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerStarted","Data":"daa521aec9b0a742cf6899638c60a27ba11d442406f1ac28b6780c148d42a4ec"} Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.447833 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2kkwn"] Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.462442 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-wnn5c"] Sep 29 19:25:41 crc kubenswrapper[4779]: W0929 19:25:41.464220 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0af2a79f_956f_478b_baa6_bc18b1accce9.slice/crio-5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab WatchSource:0}: Error finding container 5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab: Status 404 returned error can't find the container with id 5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.612328 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.645511 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.645744 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.645856 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pk8j\" (UniqueName: \"kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.646007 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.647091 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.647207 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys\") pod \"2a64e434-cdce-47ed-9c44-ab5109920fc7\" (UID: \"2a64e434-cdce-47ed-9c44-ab5109920fc7\") " Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.650948 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts" (OuterVolumeSpecName: "scripts") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.651023 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j" (OuterVolumeSpecName: "kube-api-access-2pk8j") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "kube-api-access-2pk8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.651145 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.654258 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.671903 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data" (OuterVolumeSpecName: "config-data") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.679108 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a64e434-cdce-47ed-9c44-ab5109920fc7" (UID: "2a64e434-cdce-47ed-9c44-ab5109920fc7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749263 4779 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749299 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749312 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749346 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749360 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a64e434-cdce-47ed-9c44-ab5109920fc7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:41 crc kubenswrapper[4779]: I0929 19:25:41.749374 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pk8j\" (UniqueName: \"kubernetes.io/projected/2a64e434-cdce-47ed-9c44-ab5109920fc7-kube-api-access-2pk8j\") on node \"crc\" DevicePath \"\"" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.360611 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7846dbb58d-4ftsw" event={"ID":"7b7c84ea-91fb-467d-b935-6f5034a2d7fb","Type":"ContainerStarted","Data":"d0abfeeeedae1fc82ea4a10068b6fd3b6fd23e66482d179126836202182387a0"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.360856 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7846dbb58d-4ftsw" 
event={"ID":"7b7c84ea-91fb-467d-b935-6f5034a2d7fb","Type":"ContainerStarted","Data":"828c52552630e232330c336e356495a586e5949f9870ad3ef39d35832a812ab4"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.360865 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7846dbb58d-4ftsw" event={"ID":"7b7c84ea-91fb-467d-b935-6f5034a2d7fb","Type":"ContainerStarted","Data":"2fc4c57910c2c1d6fa9ce2eb2785ec02aa53d52636892055b385bba772b4cc82"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.363528 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7846dbb58d-4ftsw" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.363563 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7846dbb58d-4ftsw" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.388597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wnn5c" event={"ID":"e2a74779-76d8-4fee-bd24-cb11d5d72915","Type":"ContainerStarted","Data":"96746c5fc080a933c2c9e0301ae13cc197a4ad4231b70cd4f5c37ce7fd3bbb55"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.399007 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2kkwn" event={"ID":"0af2a79f-956f-478b-baa6-bc18b1accce9","Type":"ContainerStarted","Data":"5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.415812 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrnv2" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.421522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrnv2" event={"ID":"2a64e434-cdce-47ed-9c44-ab5109920fc7","Type":"ContainerDied","Data":"75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.421579 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75ddf99dada94b08b21303ac71dd63e7ad30469801a494615fe0c6b952b074de" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.450662 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6rvdb" event={"ID":"f4c19704-2fd1-4d08-a947-f80d1d84f543","Type":"ContainerStarted","Data":"a6b63f090a525b692b14f054a947c515919a67e98d9fad30671a586cd42e7562"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.450704 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6rvdb" event={"ID":"f4c19704-2fd1-4d08-a947-f80d1d84f543","Type":"ContainerStarted","Data":"7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c"} Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.460195 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7846dbb58d-4ftsw" podStartSLOduration=5.460176774 podStartE2EDuration="5.460176774s" podCreationTimestamp="2025-09-29 19:25:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:42.407501364 +0000 UTC m=+1053.291926474" watchObservedRunningTime="2025-09-29 19:25:42.460176774 +0000 UTC m=+1053.344601874" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.463565 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6d4574fd6f-56jht"] Sep 29 19:25:42 crc kubenswrapper[4779]: E0929 19:25:42.463971 4779 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a64e434-cdce-47ed-9c44-ab5109920fc7" containerName="keystone-bootstrap" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.463982 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a64e434-cdce-47ed-9c44-ab5109920fc7" containerName="keystone-bootstrap" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.464158 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a64e434-cdce-47ed-9c44-ab5109920fc7" containerName="keystone-bootstrap" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.465652 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.468550 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zpl" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.468813 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.471762 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.471956 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.472063 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.472190 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.482049 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d4574fd6f-56jht"] Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.508847 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6rvdb" podStartSLOduration=4.508822804 podStartE2EDuration="4.508822804s" podCreationTimestamp="2025-09-29 19:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:42.483198869 +0000 UTC m=+1053.367623969" watchObservedRunningTime="2025-09-29 19:25:42.508822804 +0000 UTC m=+1053.393247904" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565168 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-combined-ca-bundle\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565489 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-fernet-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565583 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-config-data\") pod \"keystone-6d4574fd6f-56jht\" (UID: 
\"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565655 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-internal-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565777 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-scripts\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565846 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-credential-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565926 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-public-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.565995 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w24w2\" (UniqueName: \"kubernetes.io/projected/413037bd-ba8f-4874-a915-1c77426d689b-kube-api-access-w24w2\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668422 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-combined-ca-bundle\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668664 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-fernet-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668780 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-config-data\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668837 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-internal-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: 
\"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668913 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-scripts\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-credential-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.668970 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-public-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.669005 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w24w2\" (UniqueName: \"kubernetes.io/projected/413037bd-ba8f-4874-a915-1c77426d689b-kube-api-access-w24w2\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.679242 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-combined-ca-bundle\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.682342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-credential-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.682758 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-config-data\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.683858 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-scripts\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.692223 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-public-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.707669 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-internal-tls-certs\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.709009 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/413037bd-ba8f-4874-a915-1c77426d689b-fernet-keys\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.711851 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w24w2\" (UniqueName: \"kubernetes.io/projected/413037bd-ba8f-4874-a915-1c77426d689b-kube-api-access-w24w2\") pod \"keystone-6d4574fd6f-56jht\" (UID: \"413037bd-ba8f-4874-a915-1c77426d689b\") " pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:42 crc kubenswrapper[4779]: I0929 19:25:42.813576 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.407252 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d4574fd6f-56jht"] Sep 29 19:25:43 crc kubenswrapper[4779]: W0929 19:25:43.418258 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod413037bd_ba8f_4874_a915_1c77426d689b.slice/crio-1892e512a6b24261295b059fbc4b03b8807a26acf4c1363dc549da58eec9044a WatchSource:0}: Error finding container 1892e512a6b24261295b059fbc4b03b8807a26acf4c1363dc549da58eec9044a: Status 404 returned error can't find the container with id 1892e512a6b24261295b059fbc4b03b8807a26acf4c1363dc549da58eec9044a Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.461530 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d4574fd6f-56jht" event={"ID":"413037bd-ba8f-4874-a915-1c77426d689b","Type":"ContainerStarted","Data":"1892e512a6b24261295b059fbc4b03b8807a26acf4c1363dc549da58eec9044a"} Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.502558 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.502616 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.547640 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.574783 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.729156 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.729206 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.785344 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.785764 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.785827 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.786715 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.786785 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460" gracePeriod=600 Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.863112 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:43 crc kubenswrapper[4779]: I0929 19:25:43.863148 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-fc6fd7df6-btpzz" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.472801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d4574fd6f-56jht" event={"ID":"413037bd-ba8f-4874-a915-1c77426d689b","Type":"ContainerStarted","Data":"06c82d151b84164b957a63a99a53102808ba6f2ca2bcfeedce9df52493a799c6"} Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.478606 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460" exitCode=0 Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.479749 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460"} Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.479800 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.479814 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.479828 4779 scope.go:117] "RemoveContainer" containerID="4a4fdaef3556a5f3b6feb69078b0e45220cdb9c5faaa10378f31938118c0fbae" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.640977 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
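
The machine-config-daemon sequence above is standard liveness handling: the HTTP probe against 127.0.0.1:8798/health gets connection refused, prober.go records the failure, and kuberuntime then kills the container with the pod's termination grace period (600s here) so it can be restarted; the subsequent ContainerDied and RemoveContainer records are part of that restart. A sketch of the probe shape the log implies; the path and port come straight from the probe output, while the numeric thresholds are assumptions.

    package sketch

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    // Liveness probe implied by the machine-config-daemon records above.
    // Note that gracePeriod=600 in the kill record comes from the pod's
    // terminationGracePeriodSeconds, not from the probe itself.
    var mcdLiveness = corev1.Probe{
        ProbeHandler: corev1.ProbeHandler{
            HTTPGet: &corev1.HTTPGetAction{
                Path: "/health",
                Port: intstr.FromInt(8798),
            },
        },
        PeriodSeconds:    30, // assumed
        FailureThreshold: 3,  // assumed; the kill fires once this is exceeded
    }
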
pod="openstack/glance-default-external-api-0" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.641040 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.678081 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.705439 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6d4574fd6f-56jht" podStartSLOduration=2.705421324 podStartE2EDuration="2.705421324s" podCreationTimestamp="2025-09-29 19:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:25:44.499788632 +0000 UTC m=+1055.384213732" watchObservedRunningTime="2025-09-29 19:25:44.705421324 +0000 UTC m=+1055.589846424" Sep 29 19:25:44 crc kubenswrapper[4779]: I0929 19:25:44.705954 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:25:45 crc kubenswrapper[4779]: I0929 19:25:45.488388 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 19:25:45 crc kubenswrapper[4779]: I0929 19:25:45.488540 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 19:25:45 crc kubenswrapper[4779]: I0929 19:25:45.488986 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6d4574fd6f-56jht" Sep 29 19:25:46 crc kubenswrapper[4779]: I0929 19:25:46.337880 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:46 crc kubenswrapper[4779]: I0929 19:25:46.412984 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:25:47 crc kubenswrapper[4779]: I0929 19:25:47.519932 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:25:47 crc kubenswrapper[4779]: I0929 19:25:47.520241 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:25:47 crc kubenswrapper[4779]: I0929 19:25:47.522455 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:25:47 crc kubenswrapper[4779]: I0929 19:25:47.547372 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:25:53 crc kubenswrapper[4779]: I0929 19:25:53.730178 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Sep 29 19:25:53 crc kubenswrapper[4779]: I0929 19:25:53.865275 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-fc6fd7df6-btpzz" podUID="6cd722c9-4e9b-4bad-a9fd-84529803680b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.041264 
4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.041934 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8vgc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-2kkwn_openstack(0af2a79f-956f-478b-baa6-bc18b1accce9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.043149 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-2kkwn" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.585674 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.586076 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t5qlh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(ab4fe873-f58e-4b0f-8d40-1ac1be214b0c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.587617 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" Sep 29 19:26:00 crc kubenswrapper[4779]: I0929 19:26:00.646636 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="ceilometer-notification-agent" 
containerID="cri-o://549a85bd0aea730e66cd62f4effe60d79b4e0b799a081a8d1cd429c4cc09c2b1" gracePeriod=30 Sep 29 19:26:00 crc kubenswrapper[4779]: I0929 19:26:00.646731 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="sg-core" containerID="cri-o://daa521aec9b0a742cf6899638c60a27ba11d442406f1ac28b6780c148d42a4ec" gracePeriod=30 Sep 29 19:26:00 crc kubenswrapper[4779]: E0929 19:26:00.649108 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-2kkwn" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.655451 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4c19704-2fd1-4d08-a947-f80d1d84f543" containerID="a6b63f090a525b692b14f054a947c515919a67e98d9fad30671a586cd42e7562" exitCode=0 Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.655508 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6rvdb" event={"ID":"f4c19704-2fd1-4d08-a947-f80d1d84f543","Type":"ContainerDied","Data":"a6b63f090a525b692b14f054a947c515919a67e98d9fad30671a586cd42e7562"} Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.660869 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerID="daa521aec9b0a742cf6899638c60a27ba11d442406f1ac28b6780c148d42a4ec" exitCode=2 Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.660909 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerID="549a85bd0aea730e66cd62f4effe60d79b4e0b799a081a8d1cd429c4cc09c2b1" exitCode=0 Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.660937 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerDied","Data":"daa521aec9b0a742cf6899638c60a27ba11d442406f1ac28b6780c148d42a4ec"} Sep 29 19:26:01 crc kubenswrapper[4779]: I0929 19:26:01.660969 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerDied","Data":"549a85bd0aea730e66cd62f4effe60d79b4e0b799a081a8d1cd429c4cc09c2b1"} Sep 29 19:26:01 crc kubenswrapper[4779]: E0929 19:26:01.739443 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 19:26:01 crc kubenswrapper[4779]: E0929 19:26:01.739946 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x4h4h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-wnn5c_openstack(e2a74779-76d8-4fee-bd24-cb11d5d72915): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:26:01 crc kubenswrapper[4779]: E0929 19:26:01.744536 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-wnn5c" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.131829 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333205 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333273 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333375 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333456 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333505 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333572 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5qlh\" (UniqueName: \"kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.333667 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd\") pod \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\" (UID: \"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c\") " Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.334474 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.334795 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.341860 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts" (OuterVolumeSpecName: "scripts") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.346556 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh" (OuterVolumeSpecName: "kube-api-access-t5qlh") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "kube-api-access-t5qlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.362869 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.368554 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data" (OuterVolumeSpecName: "config-data") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.376985 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" (UID: "ab4fe873-f58e-4b0f-8d40-1ac1be214b0c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438657 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438704 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438737 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5qlh\" (UniqueName: \"kubernetes.io/projected/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-kube-api-access-t5qlh\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438749 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438765 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438777 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.438815 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.675251 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4fe873-f58e-4b0f-8d40-1ac1be214b0c","Type":"ContainerDied","Data":"f17533c21ca6c2c20c6d6e870ea9b1533f7fa3b97846a6b9152115c9370ff665"} Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.675586 4779 scope.go:117] "RemoveContainer" containerID="daa521aec9b0a742cf6899638c60a27ba11d442406f1ac28b6780c148d42a4ec" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.675292 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.680043 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03"} Sep 29 19:26:02 crc kubenswrapper[4779]: E0929 19:26:02.683300 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-wnn5c" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.803061 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.816796 4779 scope.go:117] "RemoveContainer" containerID="549a85bd0aea730e66cd62f4effe60d79b4e0b799a081a8d1cd429c4cc09c2b1" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.821816 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.833299 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:02 crc kubenswrapper[4779]: E0929 19:26:02.833730 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="ceilometer-notification-agent" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.833756 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="ceilometer-notification-agent" Sep 29 19:26:02 crc kubenswrapper[4779]: E0929 19:26:02.833785 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="sg-core" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.833794 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="sg-core" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.834015 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="ceilometer-notification-agent" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.834042 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" containerName="sg-core" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.835800 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.838469 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.838592 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.839969 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.951505 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.951859 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.951897 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.951933 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.952099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.952153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fknxd\" (UniqueName: \"kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:02 crc kubenswrapper[4779]: I0929 19:26:02.952192 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.036453 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.053797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.053878 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.053977 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.054024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fknxd\" (UniqueName: \"kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.054079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.054235 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.054276 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.055516 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.055632 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.062149 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " 
pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.062532 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.066136 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.066959 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.081142 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fknxd\" (UniqueName: \"kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd\") pod \"ceilometer-0\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.151973 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.155889 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config\") pod \"f4c19704-2fd1-4d08-a947-f80d1d84f543\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.155981 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46hzq\" (UniqueName: \"kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq\") pod \"f4c19704-2fd1-4d08-a947-f80d1d84f543\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.156029 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle\") pod \"f4c19704-2fd1-4d08-a947-f80d1d84f543\" (UID: \"f4c19704-2fd1-4d08-a947-f80d1d84f543\") " Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.160963 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq" (OuterVolumeSpecName: "kube-api-access-46hzq") pod "f4c19704-2fd1-4d08-a947-f80d1d84f543" (UID: "f4c19704-2fd1-4d08-a947-f80d1d84f543"). InnerVolumeSpecName "kube-api-access-46hzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.179561 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config" (OuterVolumeSpecName: "config") pod "f4c19704-2fd1-4d08-a947-f80d1d84f543" (UID: "f4c19704-2fd1-4d08-a947-f80d1d84f543"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.180918 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4c19704-2fd1-4d08-a947-f80d1d84f543" (UID: "f4c19704-2fd1-4d08-a947-f80d1d84f543"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.260086 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.260116 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46hzq\" (UniqueName: \"kubernetes.io/projected/f4c19704-2fd1-4d08-a947-f80d1d84f543-kube-api-access-46hzq\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.260128 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c19704-2fd1-4d08-a947-f80d1d84f543-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:03 crc kubenswrapper[4779]: W0929 19:26:03.580136 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e4b85f8_60be_4e1d_9fcd_a227efd711fd.slice/crio-057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4 WatchSource:0}: Error finding container 057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4: Status 404 returned error can't find the container with id 057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4 Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.581673 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.703815 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6rvdb" event={"ID":"f4c19704-2fd1-4d08-a947-f80d1d84f543","Type":"ContainerDied","Data":"7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c"} Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.703866 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cc561e2e075be5c81b19239c6367abe999b4271ce215b454a6aec4de3f8469c" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.703923 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6rvdb" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.707035 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerStarted","Data":"057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4"} Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.790230 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab4fe873-f58e-4b0f-8d40-1ac1be214b0c" path="/var/lib/kubelet/pods/ab4fe873-f58e-4b0f-8d40-1ac1be214b0c/volumes" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.904709 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:03 crc kubenswrapper[4779]: E0929 19:26:03.905035 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c19704-2fd1-4d08-a947-f80d1d84f543" containerName="neutron-db-sync" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.905050 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c19704-2fd1-4d08-a947-f80d1d84f543" containerName="neutron-db-sync" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.905246 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c19704-2fd1-4d08-a947-f80d1d84f543" containerName="neutron-db-sync" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.906049 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:03 crc kubenswrapper[4779]: I0929 19:26:03.927572 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.002725 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.010501 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.019566 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.021502 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-7jktj" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.021762 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.024497 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.024783 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.080966 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgv9k\" (UniqueName: \"kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.081029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.081068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.081141 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.081189 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.081248 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182334 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182479 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182548 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182770 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgv9k\" (UniqueName: \"kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182797 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182831 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182855 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182890 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4zqn\" (UniqueName: 
\"kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.182915 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.186559 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.186559 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.186594 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.187076 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.187099 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.218192 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgv9k\" (UniqueName: \"kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k\") pod \"dnsmasq-dns-84b966f6c9-vmh6c\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.230327 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.284614 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.284833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.284935 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.285110 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4zqn\" (UniqueName: \"kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.285348 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.292071 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.293370 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.303207 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.305930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4zqn\" (UniqueName: \"kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.307202 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config\") pod \"neutron-6656fdb884-9h5nb\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.369568 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:04 crc kubenswrapper[4779]: I0929 19:26:04.734266 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.723037 4779 generic.go:334] "Generic (PLEG): container finished" podID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerID="8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0" exitCode=0 Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.723144 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" event={"ID":"7d6613f2-eca3-41c7-86c4-7c2726764f27","Type":"ContainerDied","Data":"8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0"} Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.723665 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" event={"ID":"7d6613f2-eca3-41c7-86c4-7c2726764f27","Type":"ContainerStarted","Data":"b044cafb324a3281fc06e771f9206aa213e5a0414e51eac1a2b1bef61c8111ec"} Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.727656 4779 generic.go:334] "Generic (PLEG): container finished" podID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerID="2d7978690ffe9b5b89ea08a4a5420238484738ad683c853f66a8429d474249a3" exitCode=137 Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.727703 4779 generic.go:334] "Generic (PLEG): container finished" podID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerID="28c041be6ca4d08a38ed85b2b19f3ce8abf070551e701d0eba1bbea6f712dd6c" exitCode=137 Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.727722 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerDied","Data":"2d7978690ffe9b5b89ea08a4a5420238484738ad683c853f66a8429d474249a3"} Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.727743 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerDied","Data":"28c041be6ca4d08a38ed85b2b19f3ce8abf070551e701d0eba1bbea6f712dd6c"} Sep 29 19:26:05 crc kubenswrapper[4779]: I0929 19:26:05.959059 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.132453 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6c8fdd957c-ft6jd"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.176741 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.240488 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.248759 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key\") pod \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") "
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.248812 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data\") pod \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") "
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.248852 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6fmv\" (UniqueName: \"kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv\") pod \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") "
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.248931 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts\") pod \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") "
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.249016 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs\") pod \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\" (UID: \"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d\") "
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.250333 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs" (OuterVolumeSpecName: "logs") pod "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" (UID: "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.256395 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" (UID: "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.256508 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv" (OuterVolumeSpecName: "kube-api-access-l6fmv") pod "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" (UID: "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d"). InnerVolumeSpecName "kube-api-access-l6fmv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.278053 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data" (OuterVolumeSpecName: "config-data") pod "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" (UID: "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.287820 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts" (OuterVolumeSpecName: "scripts") pod "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" (UID: "8ac1ccdd-a13c-4f3c-9e06-731454c5f58d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.351983 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-logs\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.353418 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.353437 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.353447 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6fmv\" (UniqueName: \"kubernetes.io/projected/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-kube-api-access-l6fmv\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.353457 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.738838 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerStarted","Data":"2987f26cacd371de01cf94d756cd9e2992536d73bea4957c97162122b9b43b8a"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.743402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerStarted","Data":"336940a08ed36f3eff3815dcf956450178f41f48a039feddb49310c234ef6ee2"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.743436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerStarted","Data":"a24a8a0e0e2fdffe46fcebbbaea17fe81b2296d14936502853999c7832cc3700"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.743448 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerStarted","Data":"c4ccda0b04cb30f618b4d86978b3ec9f047667eeb74286a77090bdfc5fb03521"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.744502 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6656fdb884-9h5nb"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.745507 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d6758dbc9-fppqt"]
Sep 29 19:26:06 crc kubenswrapper[4779]: E0929 19:26:06.745860 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.745876 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon"
Sep 29 19:26:06 crc kubenswrapper[4779]: E0929 19:26:06.745884 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon-log"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.745890 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon-log"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.746073 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon-log"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.746101 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" containerName="horizon"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.749359 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.750918 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.751923 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.755976 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6c8fdd957c-ft6jd" event={"ID":"8ac1ccdd-a13c-4f3c-9e06-731454c5f58d","Type":"ContainerDied","Data":"454ebba1ca13cbd1655115b4f6b9309da8ad7b7b164c81456af654ec81f1733e"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.756041 4779 scope.go:117] "RemoveContainer" containerID="2d7978690ffe9b5b89ea08a4a5420238484738ad683c853f66a8429d474249a3"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.756187 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6c8fdd957c-ft6jd"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.777081 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d6758dbc9-fppqt"]
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.779905 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" event={"ID":"7d6613f2-eca3-41c7-86c4-7c2726764f27","Type":"ContainerStarted","Data":"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908"}
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.781201 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.788827 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6656fdb884-9h5nb" podStartSLOduration=3.7888102359999998 podStartE2EDuration="3.788810236s" podCreationTimestamp="2025-09-29 19:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:06.771676261 +0000 UTC m=+1077.656101361" watchObservedRunningTime="2025-09-29 19:26:06.788810236 +0000 UTC m=+1077.673235336"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.859204 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-httpd-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.859276 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-public-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.859334 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-internal-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.861380 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-combined-ca-bundle\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.861469 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9kkp\" (UniqueName: \"kubernetes.io/projected/3225f7bc-88d9-4d11-a415-e6a421573849-kube-api-access-w9kkp\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.861715 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.861746 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-ovndb-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.872023 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" podStartSLOduration=3.8720027740000003 podStartE2EDuration="3.872002774s" podCreationTimestamp="2025-09-29 19:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:06.851992471 +0000 UTC m=+1077.736417591" watchObservedRunningTime="2025-09-29 19:26:06.872002774 +0000 UTC m=+1077.756427874"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.919353 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"]
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.926050 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6c8fdd957c-ft6jd"]
Sep 29 19:26:06 crc kubenswrapper[4779]: E0929 19:26:06.945147 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ac1ccdd_a13c_4f3c_9e06_731454c5f58d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ac1ccdd_a13c_4f3c_9e06_731454c5f58d.slice/crio-454ebba1ca13cbd1655115b4f6b9309da8ad7b7b164c81456af654ec81f1733e\": RecentStats: unable to find data in memory cache]"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.963745 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.963798 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-ovndb-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.963922 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-httpd-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.963960 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-public-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.964071 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-internal-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.964138 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-combined-ca-bundle\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.964164 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9kkp\" (UniqueName: \"kubernetes.io/projected/3225f7bc-88d9-4d11-a415-e6a421573849-kube-api-access-w9kkp\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.969857 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.972027 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-ovndb-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.972572 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-public-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.980097 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-combined-ca-bundle\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.988854 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9kkp\" (UniqueName: \"kubernetes.io/projected/3225f7bc-88d9-4d11-a415-e6a421573849-kube-api-access-w9kkp\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:06 crc kubenswrapper[4779]: I0929 19:26:06.995138 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-internal-tls-certs\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.008791 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3225f7bc-88d9-4d11-a415-e6a421573849-httpd-config\") pod \"neutron-d6758dbc9-fppqt\" (UID: \"3225f7bc-88d9-4d11-a415-e6a421573849\") " pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.062853 4779 scope.go:117] "RemoveContainer" containerID="28c041be6ca4d08a38ed85b2b19f3ce8abf070551e701d0eba1bbea6f712dd6c"
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.107637 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.779954 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ac1ccdd-a13c-4f3c-9e06-731454c5f58d" path="/var/lib/kubelet/pods/8ac1ccdd-a13c-4f3c-9e06-731454c5f58d/volumes"
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.780963 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d6758dbc9-fppqt"]
Sep 29 19:26:07 crc kubenswrapper[4779]: I0929 19:26:07.813220 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerStarted","Data":"9119ca905316b070b1d8ea07fa2990dbb483f21e8ff2be3166d2866e3ad7197b"}
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.239200 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6f7f5b6d48-8js86"
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.414388 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-fc6fd7df6-btpzz"
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.523691 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"]
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.859921 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerStarted","Data":"d415578bba65d5d24470e52ca7d0a3b4801b581d2265f0736392a3ba8a920675"}
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.866394 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon-log" containerID="cri-o://193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1" gracePeriod=30
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.867809 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d6758dbc9-fppqt" event={"ID":"3225f7bc-88d9-4d11-a415-e6a421573849","Type":"ContainerStarted","Data":"cb0d4cafcff921559b74c2ff7c82bdd9d90f4ea8454128cedb1ebc1b319ed819"}
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.867916 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d6758dbc9-fppqt" event={"ID":"3225f7bc-88d9-4d11-a415-e6a421573849","Type":"ContainerStarted","Data":"2a5d63e8e06f7c46a19fe0cf2063a4df385c59e6839da01fb19d6ee115e04e26"}
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.867993 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d6758dbc9-fppqt" event={"ID":"3225f7bc-88d9-4d11-a415-e6a421573849","Type":"ContainerStarted","Data":"43feceb349dd44310dc02fe25b4237826f3fe0839bfdaed36d6ba480e0b4a36b"}
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.868072 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-d6758dbc9-fppqt"
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.869584 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" containerID="cri-o://a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece" gracePeriod=30
Sep 29 19:26:08 crc kubenswrapper[4779]: I0929 19:26:08.893575 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d6758dbc9-fppqt" podStartSLOduration=2.893543892 podStartE2EDuration="2.893543892s" podCreationTimestamp="2025-09-29 19:26:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:08.890682524 +0000 UTC m=+1079.775107624" watchObservedRunningTime="2025-09-29 19:26:08.893543892 +0000 UTC m=+1079.777968992"
Sep 29 19:26:09 crc kubenswrapper[4779]: I0929 19:26:09.195455 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:26:09 crc kubenswrapper[4779]: I0929 19:26:09.208481 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7846dbb58d-4ftsw"
Sep 29 19:26:09 crc kubenswrapper[4779]: I0929 19:26:09.907475 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerStarted","Data":"d3217bb118bd7b17be40a662079994298427790a37cecb364347611119086955"}
Sep 29 19:26:09 crc kubenswrapper[4779]: I0929 19:26:09.907875 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 19:26:09 crc kubenswrapper[4779]: I0929 19:26:09.935635 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.001275168 podStartE2EDuration="7.935615225s" podCreationTimestamp="2025-09-29 19:26:02 +0000 UTC" firstStartedPulling="2025-09-29 19:26:03.583774137 +0000 UTC m=+1074.468199277" lastFinishedPulling="2025-09-29 19:26:09.518114234 +0000 UTC m=+1080.402539334" observedRunningTime="2025-09-29 19:26:09.930367603 +0000 UTC m=+1080.814792703" watchObservedRunningTime="2025-09-29 19:26:09.935615225 +0000 UTC m=+1080.820040325"
Sep 29 19:26:12 crc kubenswrapper[4779]: I0929 19:26:12.941631 4779 generic.go:334] "Generic (PLEG): container finished" podID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerID="a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece" exitCode=0
Sep 29 19:26:12 crc kubenswrapper[4779]: I0929 19:26:12.941694 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerDied","Data":"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece"}
Sep 29 19:26:13 crc kubenswrapper[4779]: I0929 19:26:13.729854 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused"
Sep 29 19:26:13 crc kubenswrapper[4779]: I0929 19:26:13.957663 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2kkwn" event={"ID":"0af2a79f-956f-478b-baa6-bc18b1accce9","Type":"ContainerStarted","Data":"104d8711f2e258f7c757a2a97f339867313c4108f16af3aae11a3e03b3d809d5"}
Sep 29 19:26:13 crc kubenswrapper[4779]: I0929 19:26:13.997568 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-2kkwn" podStartSLOduration=5.197229446 podStartE2EDuration="36.997553161s" podCreationTimestamp="2025-09-29 19:25:37 +0000 UTC" firstStartedPulling="2025-09-29 19:25:41.473411252 +0000 UTC m=+1052.357836342" lastFinishedPulling="2025-09-29 19:26:13.273734917 +0000 UTC m=+1084.158160057" observedRunningTime="2025-09-29 19:26:13.984007874 +0000 UTC m=+1084.868432984" watchObservedRunningTime="2025-09-29 19:26:13.997553161 +0000 UTC m=+1084.881978261"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.232510 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.320773 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"]
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.321023 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="dnsmasq-dns" containerID="cri-o://47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757" gracePeriod=10
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.349465 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6d4574fd6f-56jht"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.803443 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.909923 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.909987 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmvrf\" (UniqueName: \"kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.910006 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.910084 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.910105 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.910157 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc\") pod \"5551aa76-af05-4686-b100-9b9f0664be70\" (UID: \"5551aa76-af05-4686-b100-9b9f0664be70\") "
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.931851 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf" (OuterVolumeSpecName: "kube-api-access-gmvrf") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "kube-api-access-gmvrf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.973557 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.987022 4779 generic.go:334] "Generic (PLEG): container finished" podID="5551aa76-af05-4686-b100-9b9f0664be70" containerID="47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757" exitCode=0
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.987065 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" event={"ID":"5551aa76-af05-4686-b100-9b9f0664be70","Type":"ContainerDied","Data":"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"}
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.987114 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5" event={"ID":"5551aa76-af05-4686-b100-9b9f0664be70","Type":"ContainerDied","Data":"4037f0a02db51d3034d10b53b7f90d49130fe477309fa95534242238225155b3"}
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.987132 4779 scope.go:117] "RemoveContainer" containerID="47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.987222 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-pckt5"
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.989658 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.990899 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:14 crc kubenswrapper[4779]: I0929 19:26:14.996625 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config" (OuterVolumeSpecName: "config") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.005635 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5551aa76-af05-4686-b100-9b9f0664be70" (UID: "5551aa76-af05-4686-b100-9b9f0664be70"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019003 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019046 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmvrf\" (UniqueName: \"kubernetes.io/projected/5551aa76-af05-4686-b100-9b9f0664be70-kube-api-access-gmvrf\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019074 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019101 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-config\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019114 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.019127 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5551aa76-af05-4686-b100-9b9f0664be70-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.032267 4779 scope.go:117] "RemoveContainer" containerID="7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5"
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.056569 4779 scope.go:117] "RemoveContainer" containerID="47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"
Sep 29 19:26:15 crc kubenswrapper[4779]: E0929 19:26:15.056965 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757\": container with ID starting with 47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757 not found: ID does not exist" containerID="47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.057008 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757"} err="failed to get container status \"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757\": rpc error: code = NotFound desc = could not find container \"47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757\": container with ID starting with 47064a5e1cc018d5b39ac1ee45e9e7849469e46160a3b15be8f9f6d72a13e757 not found: ID does not exist"
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.057034 4779 scope.go:117] "RemoveContainer" containerID="7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5"
Sep 29 19:26:15 crc kubenswrapper[4779]: E0929 19:26:15.057457 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5\": container with ID starting with 7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5 not found: ID does not exist" containerID="7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5"
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.057523 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5"} err="failed to get container status \"7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5\": rpc error: code = NotFound desc = could not find container \"7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5\": container with ID starting with 7d509481fd4d21f0975019dbb02876aa2108cb9a1c3fbb4eda4303526583cec5 not found: ID does not exist"
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.340881 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"]
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.349671 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-pckt5"]
Sep 29 19:26:15 crc kubenswrapper[4779]: I0929 19:26:15.778899 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5551aa76-af05-4686-b100-9b9f0664be70" path="/var/lib/kubelet/pods/5551aa76-af05-4686-b100-9b9f0664be70/volumes"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.968698 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Sep 29 19:26:16 crc kubenswrapper[4779]: E0929 19:26:16.969433 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="dnsmasq-dns"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.969466 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="dnsmasq-dns"
Sep 29 19:26:16 crc kubenswrapper[4779]: E0929 19:26:16.969511 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="init"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.969528 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="init"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.969861 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5551aa76-af05-4686-b100-9b9f0664be70" containerName="dnsmasq-dns"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.971086 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.974612 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wwm5b"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.974777 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.974920 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Sep 29 19:26:16 crc kubenswrapper[4779]: I0929 19:26:16.994037 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.013688 4779 generic.go:334] "Generic (PLEG): container finished" podID="0af2a79f-956f-478b-baa6-bc18b1accce9" containerID="104d8711f2e258f7c757a2a97f339867313c4108f16af3aae11a3e03b3d809d5" exitCode=0
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.013725 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2kkwn" event={"ID":"0af2a79f-956f-478b-baa6-bc18b1accce9","Type":"ContainerDied","Data":"104d8711f2e258f7c757a2a97f339867313c4108f16af3aae11a3e03b3d809d5"}
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.058714 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9mk9\" (UniqueName: \"kubernetes.io/projected/58f628df-8d11-4663-b84b-0c810edaa5fb-kube-api-access-w9mk9\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.058771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config-secret\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.059051 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.060977 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.162942 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.163024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.163368 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9mk9\" (UniqueName: \"kubernetes.io/projected/58f628df-8d11-4663-b84b-0c810edaa5fb-kube-api-access-w9mk9\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.163460 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config-secret\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.164072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.169758 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-openstack-config-secret\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.174037 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f628df-8d11-4663-b84b-0c810edaa5fb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.200637 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9mk9\" (UniqueName: \"kubernetes.io/projected/58f628df-8d11-4663-b84b-0c810edaa5fb-kube-api-access-w9mk9\") pod \"openstackclient\" (UID: \"58f628df-8d11-4663-b84b-0c810edaa5fb\") " pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.305945 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 29 19:26:17 crc kubenswrapper[4779]: I0929 19:26:17.741081 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 29 19:26:17 crc kubenswrapper[4779]: W0929 19:26:17.748309 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58f628df_8d11_4663_b84b_0c810edaa5fb.slice/crio-356481f29a1b98548dd2dc0e91b050a7ab5fda74476d62e7736e1de8431f524a WatchSource:0}: Error finding container 356481f29a1b98548dd2dc0e91b050a7ab5fda74476d62e7736e1de8431f524a: Status 404 returned error can't find the container with id 356481f29a1b98548dd2dc0e91b050a7ab5fda74476d62e7736e1de8431f524a
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.022338 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58f628df-8d11-4663-b84b-0c810edaa5fb","Type":"ContainerStarted","Data":"356481f29a1b98548dd2dc0e91b050a7ab5fda74476d62e7736e1de8431f524a"}
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.363174 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2kkwn"
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.385314 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data\") pod \"0af2a79f-956f-478b-baa6-bc18b1accce9\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") "
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.385466 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle\") pod \"0af2a79f-956f-478b-baa6-bc18b1accce9\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") "
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.385548 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vgc5\" (UniqueName: \"kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5\") pod \"0af2a79f-956f-478b-baa6-bc18b1accce9\" (UID: \"0af2a79f-956f-478b-baa6-bc18b1accce9\") "
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.397264 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0af2a79f-956f-478b-baa6-bc18b1accce9" (UID: "0af2a79f-956f-478b-baa6-bc18b1accce9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.397551 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5" (OuterVolumeSpecName: "kube-api-access-8vgc5") pod "0af2a79f-956f-478b-baa6-bc18b1accce9" (UID: "0af2a79f-956f-478b-baa6-bc18b1accce9"). InnerVolumeSpecName "kube-api-access-8vgc5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.410071 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0af2a79f-956f-478b-baa6-bc18b1accce9" (UID: "0af2a79f-956f-478b-baa6-bc18b1accce9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.488214 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.488258 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af2a79f-956f-478b-baa6-bc18b1accce9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:18 crc kubenswrapper[4779]: I0929 19:26:18.488272 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vgc5\" (UniqueName: \"kubernetes.io/projected/0af2a79f-956f-478b-baa6-bc18b1accce9-kube-api-access-8vgc5\") on node \"crc\" DevicePath \"\""
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.034903 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wnn5c" event={"ID":"e2a74779-76d8-4fee-bd24-cb11d5d72915","Type":"ContainerStarted","Data":"49fa49fd5f06670c57a43269ae45c6080feadd8d98820d10f268c747299a8ae2"}
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.039086 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2kkwn" event={"ID":"0af2a79f-956f-478b-baa6-bc18b1accce9","Type":"ContainerDied","Data":"5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab"}
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.039115 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5224312ab7ff387fb01862b7f5291e698487a00004ef17a9e48e56210b4157ab"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.039133 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2kkwn"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.068811 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-wnn5c" podStartSLOduration=5.377576203 podStartE2EDuration="42.068793833s" podCreationTimestamp="2025-09-29 19:25:37 +0000 UTC" firstStartedPulling="2025-09-29 19:25:41.491239076 +0000 UTC m=+1052.375664176" lastFinishedPulling="2025-09-29 19:26:18.182456716 +0000 UTC m=+1089.066881806" observedRunningTime="2025-09-29 19:26:19.062703187 +0000 UTC m=+1089.947128287" watchObservedRunningTime="2025-09-29 19:26:19.068793833 +0000 UTC m=+1089.953218933"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.310772 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7bd9b84c75-dphls"]
Sep 29 19:26:19 crc kubenswrapper[4779]: E0929 19:26:19.311333 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" containerName="barbican-db-sync"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.311355 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" containerName="barbican-db-sync"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.311583 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" containerName="barbican-db-sync"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.312716 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.332091 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.332397 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.332988 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8lt8h"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.345414 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bd9b84c75-dphls"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.400860 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.403617 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404817 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-combined-ca-bundle\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404857 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-combined-ca-bundle\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404896 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-logs\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404920 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data-custom\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404965 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csldk\" (UniqueName: \"kubernetes.io/projected/d3365fba-7e29-4f75-aa74-67ffd7275a15-kube-api-access-csldk\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.404985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.405032 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data-custom\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.405049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.405064 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3365fba-7e29-4f75-aa74-67ffd7275a15-logs\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.405087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rpjx\" (UniqueName: \"kubernetes.io/projected/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-kube-api-access-5rpjx\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.406519 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.469053 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508146 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-combined-ca-bundle\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508182 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-combined-ca-bundle\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508221 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-logs\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508249 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data-custom\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508297 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csldk\" (UniqueName: \"kubernetes.io/projected/d3365fba-7e29-4f75-aa74-67ffd7275a15-kube-api-access-csldk\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508329 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508381 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data-custom\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508398 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508414 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3365fba-7e29-4f75-aa74-67ffd7275a15-logs\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.508437 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rpjx\" (UniqueName: \"kubernetes.io/projected/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-kube-api-access-5rpjx\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.520543 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-logs\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.520939 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-combined-ca-bundle\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.521621 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3365fba-7e29-4f75-aa74-67ffd7275a15-logs\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.523575 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.524918 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.526085 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data-custom\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.526175 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-combined-ca-bundle\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.526363 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-config-data\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.529245 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data-custom\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.537513 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rpjx\" (UniqueName: \"kubernetes.io/projected/7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b-kube-api-access-5rpjx\") pod \"barbican-worker-7bd9b84c75-dphls\" (UID: \"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b\") " pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.537579 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.548288 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3365fba-7e29-4f75-aa74-67ffd7275a15-config-data\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.550522 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.552785 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-544464c4cd-mfj4x"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.557689 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.564176 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csldk\" (UniqueName: \"kubernetes.io/projected/d3365fba-7e29-4f75-aa74-67ffd7275a15-kube-api-access-csldk\") pod \"barbican-keystone-listener-58f6cf64bb-t8fdp\" (UID: \"d3365fba-7e29-4f75-aa74-67ffd7275a15\") " pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.583717 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"]
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.672231 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bd9b84c75-dphls"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.734681 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.738357 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.738596 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.738725 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.738816 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.738943 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqk4\" (UniqueName: \"kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x"
Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739041 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data\") pod
\"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739249 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739336 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739458 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msh7x\" (UniqueName: \"kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.739609 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847887 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847912 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqk4\" (UniqueName: \"kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847932 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847954 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.847971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.848011 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msh7x\" (UniqueName: \"kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.848032 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.848050 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.848103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.848126 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.851527 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.853698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc\") pod 
\"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.853959 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.854288 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.855025 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.855744 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.859201 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.861092 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.862084 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.886240 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msh7x\" (UniqueName: \"kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x\") pod \"dnsmasq-dns-75c8ddd69c-nhvtq\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.886529 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqk4\" (UniqueName: \"kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4\") pod \"barbican-api-544464c4cd-mfj4x\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " 
pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.938043 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:19 crc kubenswrapper[4779]: I0929 19:26:19.944674 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:20 crc kubenswrapper[4779]: I0929 19:26:20.169882 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bd9b84c75-dphls"] Sep 29 19:26:20 crc kubenswrapper[4779]: I0929 19:26:20.298543 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58f6cf64bb-t8fdp"] Sep 29 19:26:20 crc kubenswrapper[4779]: I0929 19:26:20.536476 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"] Sep 29 19:26:20 crc kubenswrapper[4779]: I0929 19:26:20.665681 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"] Sep 29 19:26:20 crc kubenswrapper[4779]: W0929 19:26:20.672815 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5fc66bc_61c8_4f4c_838a_0ff89fbb9b1b.slice/crio-579b2152d0212dc8e8e9af5cd495d0bd28e6751190dc8d7d11da3aba1f071076 WatchSource:0}: Error finding container 579b2152d0212dc8e8e9af5cd495d0bd28e6751190dc8d7d11da3aba1f071076: Status 404 returned error can't find the container with id 579b2152d0212dc8e8e9af5cd495d0bd28e6751190dc8d7d11da3aba1f071076 Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.084444 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerStarted","Data":"cdb240ceb8eaeb2910af62209da980e8c86e14a0fc927d62cdbdbeab07081cf7"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.084985 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.085006 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerStarted","Data":"af5251944fd6d520afeac767ee3873c44eb12ca258f31d70f801e11d2cf19e0f"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.085044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerStarted","Data":"579b2152d0212dc8e8e9af5cd495d0bd28e6751190dc8d7d11da3aba1f071076"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.085058 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.087064 4779 generic.go:334] "Generic (PLEG): container finished" podID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerID="cc5ebbf7987ea603703ec0c190e0d20638d9b5a47d01e405497b16715c8491ca" exitCode=0 Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.087124 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" event={"ID":"bd571bff-be79-40df-b5bb-408fb8017dcc","Type":"ContainerDied","Data":"cc5ebbf7987ea603703ec0c190e0d20638d9b5a47d01e405497b16715c8491ca"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 
19:26:21.087150 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" event={"ID":"bd571bff-be79-40df-b5bb-408fb8017dcc","Type":"ContainerStarted","Data":"17c8a6a7c5626f239cb5098f16d2b18fb8149e45411cb93bb1faf85db17df172"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.091217 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp" event={"ID":"d3365fba-7e29-4f75-aa74-67ffd7275a15","Type":"ContainerStarted","Data":"1a5905b0e777776a889ee6bdf80efea00408c6e797fd4b164637773da5328cd7"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.092264 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bd9b84c75-dphls" event={"ID":"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b","Type":"ContainerStarted","Data":"e9b2739618f523a556cb694e797421c94b2273a5ca300e9a8f5ec1cfdddc4106"} Sep 29 19:26:21 crc kubenswrapper[4779]: I0929 19:26:21.110840 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-544464c4cd-mfj4x" podStartSLOduration=2.110822756 podStartE2EDuration="2.110822756s" podCreationTimestamp="2025-09-29 19:26:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:21.104824683 +0000 UTC m=+1091.989249783" watchObservedRunningTime="2025-09-29 19:26:21.110822756 +0000 UTC m=+1091.995247856" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.008525 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-mm5l6"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.010034 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.017270 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-mm5l6"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.088509 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95dtx\" (UniqueName: \"kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx\") pod \"nova-api-db-create-mm5l6\" (UID: \"fa13ed58-de9d-465d-aa87-9306284e4f23\") " pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.112055 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" event={"ID":"bd571bff-be79-40df-b5bb-408fb8017dcc","Type":"ContainerStarted","Data":"e91820a50f95f095c70edd8824f7a3adba40d07a91bb69fe9975d6b080147165"} Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.136996 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.137236 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-central-agent" containerID="cri-o://2987f26cacd371de01cf94d756cd9e2992536d73bea4957c97162122b9b43b8a" gracePeriod=30 Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.137965 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="sg-core" containerID="cri-o://d415578bba65d5d24470e52ca7d0a3b4801b581d2265f0736392a3ba8a920675" 
gracePeriod=30 Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.138109 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="proxy-httpd" containerID="cri-o://d3217bb118bd7b17be40a662079994298427790a37cecb364347611119086955" gracePeriod=30 Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.138162 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-notification-agent" containerID="cri-o://9119ca905316b070b1d8ea07fa2990dbb483f21e8ff2be3166d2866e3ad7197b" gracePeriod=30 Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.155081 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" podStartSLOduration=3.155063658 podStartE2EDuration="3.155063658s" podCreationTimestamp="2025-09-29 19:26:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:22.147544344 +0000 UTC m=+1093.031969444" watchObservedRunningTime="2025-09-29 19:26:22.155063658 +0000 UTC m=+1093.039488758" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.162125 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.154:3000/\": EOF" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.192502 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95dtx\" (UniqueName: \"kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx\") pod \"nova-api-db-create-mm5l6\" (UID: \"fa13ed58-de9d-465d-aa87-9306284e4f23\") " pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.218836 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95dtx\" (UniqueName: \"kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx\") pod \"nova-api-db-create-mm5l6\" (UID: \"fa13ed58-de9d-465d-aa87-9306284e4f23\") " pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.240760 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-ths9f"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.241959 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.247258 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ths9f"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.335708 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.337383 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-gxdgx"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.338473 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.399815 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5xt7\" (UniqueName: \"kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7\") pod \"nova-cell0-db-create-ths9f\" (UID: \"37527e62-544b-42e4-9223-44fe8d4106b2\") " pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.413665 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gxdgx"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.501703 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5xt7\" (UniqueName: \"kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7\") pod \"nova-cell0-db-create-ths9f\" (UID: \"37527e62-544b-42e4-9223-44fe8d4106b2\") " pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.501839 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txg45\" (UniqueName: \"kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45\") pod \"nova-cell1-db-create-gxdgx\" (UID: \"8590a86d-3bde-4c3d-8f3f-52de9414caa6\") " pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.523737 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5xt7\" (UniqueName: \"kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7\") pod \"nova-cell0-db-create-ths9f\" (UID: \"37527e62-544b-42e4-9223-44fe8d4106b2\") " pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.587210 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.603882 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txg45\" (UniqueName: \"kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45\") pod \"nova-cell1-db-create-gxdgx\" (UID: \"8590a86d-3bde-4c3d-8f3f-52de9414caa6\") " pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.627617 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txg45\" (UniqueName: \"kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45\") pod \"nova-cell1-db-create-gxdgx\" (UID: \"8590a86d-3bde-4c3d-8f3f-52de9414caa6\") " pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.668614 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.732290 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-76666bfbfc-dj7qj"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.764661 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-f48875c8b-24729"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.765123 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.765705 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f48875c8b-24729"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.765949 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.768826 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.768972 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.769081 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.769175 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.769269 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.776905 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-76666bfbfc-dj7qj"] Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.910706 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-public-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.910780 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data-custom\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.910805 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-run-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911027 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06187c54-071a-4a20-adc1-84627f949933-logs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-internal-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911109 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-public-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911252 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-config-data\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911297 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdqbd\" (UniqueName: \"kubernetes.io/projected/06187c54-071a-4a20-adc1-84627f949933-kube-api-access-bdqbd\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911359 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg8gx\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-kube-api-access-rg8gx\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911412 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-combined-ca-bundle\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911454 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-internal-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911504 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911543 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-combined-ca-bundle\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911575 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-etc-swift\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " 
pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:22 crc kubenswrapper[4779]: I0929 19:26:22.911604 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-log-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013086 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06187c54-071a-4a20-adc1-84627f949933-logs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-internal-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013159 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-public-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013205 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-config-data\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013228 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdqbd\" (UniqueName: \"kubernetes.io/projected/06187c54-071a-4a20-adc1-84627f949933-kube-api-access-bdqbd\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013255 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg8gx\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-kube-api-access-rg8gx\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-combined-ca-bundle\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013308 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-internal-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " 
pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013348 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013567 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-combined-ca-bundle\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013590 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-etc-swift\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013611 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-log-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-public-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013698 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-run-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.013715 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data-custom\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.016899 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-log-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.017459 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1dd6be85-ce64-429a-9197-23450db2e2ad-run-httpd\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.017623 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06187c54-071a-4a20-adc1-84627f949933-logs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.022733 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-etc-swift\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.023790 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-internal-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.025436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-internal-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.026049 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-public-tls-certs\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.026939 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-config-data\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.027481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data-custom\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.029088 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-combined-ca-bundle\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.035259 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-combined-ca-bundle\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.036137 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1dd6be85-ce64-429a-9197-23450db2e2ad-public-tls-certs\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.038505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg8gx\" (UniqueName: \"kubernetes.io/projected/1dd6be85-ce64-429a-9197-23450db2e2ad-kube-api-access-rg8gx\") pod \"swift-proxy-76666bfbfc-dj7qj\" (UID: \"1dd6be85-ce64-429a-9197-23450db2e2ad\") " pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.044007 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdqbd\" (UniqueName: \"kubernetes.io/projected/06187c54-071a-4a20-adc1-84627f949933-kube-api-access-bdqbd\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.050293 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06187c54-071a-4a20-adc1-84627f949933-config-data\") pod \"barbican-api-f48875c8b-24729\" (UID: \"06187c54-071a-4a20-adc1-84627f949933\") " pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.095379 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.106710 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143486 4779 generic.go:334] "Generic (PLEG): container finished" podID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerID="d3217bb118bd7b17be40a662079994298427790a37cecb364347611119086955" exitCode=0 Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143534 4779 generic.go:334] "Generic (PLEG): container finished" podID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerID="d415578bba65d5d24470e52ca7d0a3b4801b581d2265f0736392a3ba8a920675" exitCode=2 Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143544 4779 generic.go:334] "Generic (PLEG): container finished" podID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerID="2987f26cacd371de01cf94d756cd9e2992536d73bea4957c97162122b9b43b8a" exitCode=0 Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143588 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerDied","Data":"d3217bb118bd7b17be40a662079994298427790a37cecb364347611119086955"} Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143629 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerDied","Data":"d415578bba65d5d24470e52ca7d0a3b4801b581d2265f0736392a3ba8a920675"} Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143645 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerDied","Data":"2987f26cacd371de01cf94d756cd9e2992536d73bea4957c97162122b9b43b8a"} Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.143763 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.730977 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.731986 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gxdgx"] Sep 29 19:26:23 crc kubenswrapper[4779]: W0929 19:26:23.742422 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8590a86d_3bde_4c3d_8f3f_52de9414caa6.slice/crio-98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562 WatchSource:0}: Error finding container 98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562: Status 404 returned error can't find the container with id 98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562 Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.799945 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ths9f"] Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.919607 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-76666bfbfc-dj7qj"] Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.931629 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-mm5l6"] Sep 29 19:26:23 crc kubenswrapper[4779]: I0929 19:26:23.972554 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f48875c8b-24729"] Sep 29 19:26:23 crc kubenswrapper[4779]: W0929 19:26:23.973474 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa13ed58_de9d_465d_aa87_9306284e4f23.slice/crio-296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0 WatchSource:0}: Error finding container 296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0: Status 404 returned error can't find the container with id 296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0 Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.160487 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ths9f" event={"ID":"37527e62-544b-42e4-9223-44fe8d4106b2","Type":"ContainerStarted","Data":"2d4e2d5395e7b2540a24adf0ff321ffe3dcd3b829f1b1b7ddffc6d945c4b4627"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.163105 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gxdgx" event={"ID":"8590a86d-3bde-4c3d-8f3f-52de9414caa6","Type":"ContainerStarted","Data":"98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.167272 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp" event={"ID":"d3365fba-7e29-4f75-aa74-67ffd7275a15","Type":"ContainerStarted","Data":"a09ccae42f7604f60e664e3ddabfc921378d3c3b1b49d07a73f7dc817860444b"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.167406 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp" 
event={"ID":"d3365fba-7e29-4f75-aa74-67ffd7275a15","Type":"ContainerStarted","Data":"ae9d4ded65d363fd4d39f0ce592d32525977ebc44aa2df4fe07b8abc3b5ea942"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.171771 4779 generic.go:334] "Generic (PLEG): container finished" podID="e2a74779-76d8-4fee-bd24-cb11d5d72915" containerID="49fa49fd5f06670c57a43269ae45c6080feadd8d98820d10f268c747299a8ae2" exitCode=0 Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.171846 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wnn5c" event={"ID":"e2a74779-76d8-4fee-bd24-cb11d5d72915","Type":"ContainerDied","Data":"49fa49fd5f06670c57a43269ae45c6080feadd8d98820d10f268c747299a8ae2"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.173145 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76666bfbfc-dj7qj" event={"ID":"1dd6be85-ce64-429a-9197-23450db2e2ad","Type":"ContainerStarted","Data":"dbdf3a400e3a127399280bbef85d71eec149c8456e09b3e0f4a44572b7f1cc6d"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.175155 4779 generic.go:334] "Generic (PLEG): container finished" podID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerID="9119ca905316b070b1d8ea07fa2990dbb483f21e8ff2be3166d2866e3ad7197b" exitCode=0 Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.175216 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerDied","Data":"9119ca905316b070b1d8ea07fa2990dbb483f21e8ff2be3166d2866e3ad7197b"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.175243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e4b85f8-60be-4e1d-9fcd-a227efd711fd","Type":"ContainerDied","Data":"057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.175256 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="057f5ff143d56aeacd20a70641c4ec34d577fc072cf73c863abf7ddf5f3514e4" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.176359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-mm5l6" event={"ID":"fa13ed58-de9d-465d-aa87-9306284e4f23","Type":"ContainerStarted","Data":"296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.178222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bd9b84c75-dphls" event={"ID":"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b","Type":"ContainerStarted","Data":"10b05b13555fa3dd61611ab983991078d6dda9e6e34b3f843fec13a0880e4c96"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.178259 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bd9b84c75-dphls" event={"ID":"7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b","Type":"ContainerStarted","Data":"f89a85903f56bc8fa00b93bcf0725a7d76b16e40c6bfe9bdf0eba8cd1171793f"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.185856 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f48875c8b-24729" event={"ID":"06187c54-071a-4a20-adc1-84627f949933","Type":"ContainerStarted","Data":"bf6604de7f9a139da7ada18800095f4a60d265aed52f41d50128f8e1e67b899d"} Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.197482 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-58f6cf64bb-t8fdp" 
podStartSLOduration=2.347001127 podStartE2EDuration="5.197460072s" podCreationTimestamp="2025-09-29 19:26:19 +0000 UTC" firstStartedPulling="2025-09-29 19:26:20.300922435 +0000 UTC m=+1091.185347535" lastFinishedPulling="2025-09-29 19:26:23.15138138 +0000 UTC m=+1094.035806480" observedRunningTime="2025-09-29 19:26:24.183905534 +0000 UTC m=+1095.068330644" watchObservedRunningTime="2025-09-29 19:26:24.197460072 +0000 UTC m=+1095.081885172" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.200425 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.225607 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7bd9b84c75-dphls" podStartSLOduration=2.355513879 podStartE2EDuration="5.225588376s" podCreationTimestamp="2025-09-29 19:26:19 +0000 UTC" firstStartedPulling="2025-09-29 19:26:20.185640686 +0000 UTC m=+1091.070065786" lastFinishedPulling="2025-09-29 19:26:23.055715183 +0000 UTC m=+1093.940140283" observedRunningTime="2025-09-29 19:26:24.2179954 +0000 UTC m=+1095.102420500" watchObservedRunningTime="2025-09-29 19:26:24.225588376 +0000 UTC m=+1095.110013476" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.350852 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.350977 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351032 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351095 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351115 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351137 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fknxd\" (UniqueName: 
\"kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd\") pod \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\" (UID: \"2e4b85f8-60be-4e1d-9fcd-a227efd711fd\") " Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.351570 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.352106 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.355560 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.365968 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts" (OuterVolumeSpecName: "scripts") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.373829 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd" (OuterVolumeSpecName: "kube-api-access-fknxd") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "kube-api-access-fknxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.392472 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.453391 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.453419 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.453429 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fknxd\" (UniqueName: \"kubernetes.io/projected/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-kube-api-access-fknxd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.453439 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.468599 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.503589 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data" (OuterVolumeSpecName: "config-data") pod "2e4b85f8-60be-4e1d-9fcd-a227efd711fd" (UID: "2e4b85f8-60be-4e1d-9fcd-a227efd711fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.554917 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:24 crc kubenswrapper[4779]: I0929 19:26:24.554949 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e4b85f8-60be-4e1d-9fcd-a227efd711fd-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.212688 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76666bfbfc-dj7qj" event={"ID":"1dd6be85-ce64-429a-9197-23450db2e2ad","Type":"ContainerStarted","Data":"28a9c4855ccecffb776e4c580ce42f0de7312c4a599fea8524ed5a2e335dd81a"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.213003 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-76666bfbfc-dj7qj" event={"ID":"1dd6be85-ce64-429a-9197-23450db2e2ad","Type":"ContainerStarted","Data":"b0b7ca317df9d79414d37ebcd85b4bff31e5ff7da8a12228180ac0250cc3fd27"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.213060 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.213082 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.224681 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f48875c8b-24729" event={"ID":"06187c54-071a-4a20-adc1-84627f949933","Type":"ContainerStarted","Data":"db23c108350a6dbacad26b149dc7d9eb6c8f9566975f2184902201914d0c74f2"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.224727 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f48875c8b-24729" event={"ID":"06187c54-071a-4a20-adc1-84627f949933","Type":"ContainerStarted","Data":"683224a6fb92a399343d5b9e031580e89c3825935ea4e675e6160637f8ec236e"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.224743 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.224763 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.232243 4779 generic.go:334] "Generic (PLEG): container finished" podID="37527e62-544b-42e4-9223-44fe8d4106b2" containerID="b94e803bb06946a40005468889e5092af1fce22e2c90407285b4489fe644f865" exitCode=0 Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.232477 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ths9f" event={"ID":"37527e62-544b-42e4-9223-44fe8d4106b2","Type":"ContainerDied","Data":"b94e803bb06946a40005468889e5092af1fce22e2c90407285b4489fe644f865"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.240504 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-76666bfbfc-dj7qj" podStartSLOduration=3.240488651 podStartE2EDuration="3.240488651s" podCreationTimestamp="2025-09-29 19:26:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 19:26:25.235974319 +0000 UTC m=+1096.120399419" watchObservedRunningTime="2025-09-29 19:26:25.240488651 +0000 UTC m=+1096.124913741" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.247483 4779 generic.go:334] "Generic (PLEG): container finished" podID="8590a86d-3bde-4c3d-8f3f-52de9414caa6" containerID="84f09b5bf7a2b46771b260161ea05543adbc390690629f2941bf37e0754f405c" exitCode=0 Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.247672 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gxdgx" event={"ID":"8590a86d-3bde-4c3d-8f3f-52de9414caa6","Type":"ContainerDied","Data":"84f09b5bf7a2b46771b260161ea05543adbc390690629f2941bf37e0754f405c"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.254697 4779 generic.go:334] "Generic (PLEG): container finished" podID="fa13ed58-de9d-465d-aa87-9306284e4f23" containerID="5c9078ed1682e25224e0006128779813b1b7eb9d196622e03ab4026ee396f725" exitCode=0 Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.255608 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-mm5l6" event={"ID":"fa13ed58-de9d-465d-aa87-9306284e4f23","Type":"ContainerDied","Data":"5c9078ed1682e25224e0006128779813b1b7eb9d196622e03ab4026ee396f725"} Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.255679 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.289529 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-f48875c8b-24729" podStartSLOduration=3.289515862 podStartE2EDuration="3.289515862s" podCreationTimestamp="2025-09-29 19:26:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:25.28871723 +0000 UTC m=+1096.173142330" watchObservedRunningTime="2025-09-29 19:26:25.289515862 +0000 UTC m=+1096.173940962" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.320978 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.340830 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.346769 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:25 crc kubenswrapper[4779]: E0929 19:26:25.347164 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-notification-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347181 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-notification-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: E0929 19:26:25.347198 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="sg-core" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347204 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="sg-core" Sep 29 19:26:25 crc kubenswrapper[4779]: E0929 19:26:25.347223 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="proxy-httpd" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347231 4779 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="proxy-httpd" Sep 29 19:26:25 crc kubenswrapper[4779]: E0929 19:26:25.347255 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-central-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347262 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-central-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347446 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-notification-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347455 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="proxy-httpd" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347473 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="sg-core" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.347482 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" containerName="ceilometer-central-agent" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.349294 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.356076 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.357046 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.367713 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.490810 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.490919 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.490946 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.490984 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 
19:26:25.491010 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.491064 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.491114 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595258 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595277 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595304 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595338 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595379 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.595416 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 
crc kubenswrapper[4779]: I0929 19:26:25.595799 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.597850 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.600223 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.600876 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.601256 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.610323 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.614238 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs\") pod \"ceilometer-0\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.677904 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.770164 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-wnn5c" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.779936 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e4b85f8-60be-4e1d-9fcd-a227efd711fd" path="/var/lib/kubelet/pods/2e4b85f8-60be-4e1d-9fcd-a227efd711fd/volumes" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906473 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906570 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4h4h\" (UniqueName: \"kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906598 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906655 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906686 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906719 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data\") pod \"e2a74779-76d8-4fee-bd24-cb11d5d72915\" (UID: \"e2a74779-76d8-4fee-bd24-cb11d5d72915\") " Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.906957 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.907520 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2a74779-76d8-4fee-bd24-cb11d5d72915-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.911841 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h" (OuterVolumeSpecName: "kube-api-access-x4h4h") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "kube-api-access-x4h4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.912373 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts" (OuterVolumeSpecName: "scripts") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.922119 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.949007 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:25 crc kubenswrapper[4779]: I0929 19:26:25.971637 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data" (OuterVolumeSpecName: "config-data") pod "e2a74779-76d8-4fee-bd24-cb11d5d72915" (UID: "e2a74779-76d8-4fee-bd24-cb11d5d72915"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.008985 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.009016 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.009028 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.009037 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4h4h\" (UniqueName: \"kubernetes.io/projected/e2a74779-76d8-4fee-bd24-cb11d5d72915-kube-api-access-x4h4h\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.009047 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2a74779-76d8-4fee-bd24-cb11d5d72915-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.174107 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:26 crc kubenswrapper[4779]: W0929 19:26:26.188523 4779 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00a7767e_dcd8_4e64_97b0_6d5bf4ec280e.slice/crio-885f48ef1cf6dc7e0f37cec57a345bc72511344cc1f5dbd516586fde97ecd7c4 WatchSource:0}: Error finding container 885f48ef1cf6dc7e0f37cec57a345bc72511344cc1f5dbd516586fde97ecd7c4: Status 404 returned error can't find the container with id 885f48ef1cf6dc7e0f37cec57a345bc72511344cc1f5dbd516586fde97ecd7c4
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.282910 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wnn5c" event={"ID":"e2a74779-76d8-4fee-bd24-cb11d5d72915","Type":"ContainerDied","Data":"96746c5fc080a933c2c9e0301ae13cc197a4ad4231b70cd4f5c37ce7fd3bbb55"}
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.283151 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96746c5fc080a933c2c9e0301ae13cc197a4ad4231b70cd4f5c37ce7fd3bbb55"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.283214 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wnn5c"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.307516 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerStarted","Data":"885f48ef1cf6dc7e0f37cec57a345bc72511344cc1f5dbd516586fde97ecd7c4"}
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.446402 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 19:26:26 crc kubenswrapper[4779]: E0929 19:26:26.446798 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" containerName="cinder-db-sync"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.446815 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" containerName="cinder-db-sync"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.447019 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" containerName="cinder-db-sync"
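The RemoveStaleState records above are per-container resource-manager cleanup: once the cinder-db-sync pod is gone, the CPU and memory managers drop whatever CPUSet and memory assignments they still held for its containers. The records that follow show the replacement workload coming up: cinder-scheduler-0 is ADDed, its secrets are cached by reflectors, and the volume reconciler verifies and mounts each volume. A minimal sketch of the volume list those records imply (volume names and plugin types are taken from the log; which secret backs which volume, and the hostPath path, are assumptions inferred from the names and the reflector records):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// Hypothetical reconstruction of part of the cinder-scheduler-0 pod's
    	// volume list, based on the reconciler records that follow.
    	volumes := []corev1.Volume{
    		{Name: "config-data", VolumeSource: corev1.VolumeSource{
    			Secret: &corev1.SecretVolumeSource{SecretName: "cinder-scheduler-config-data"}}}, // assumed mapping
    		{Name: "scripts", VolumeSource: corev1.VolumeSource{
    			Secret: &corev1.SecretVolumeSource{SecretName: "cinder-scripts"}}}, // assumed mapping
    		{Name: "etc-machine-id", VolumeSource: corev1.VolumeSource{
    			HostPath: &corev1.HostPathVolumeSource{Path: "/etc/machine-id"}}}, // path assumed from the volume name
    		// "kube-api-access-x6868" is the projected service-account token
    		// volume injected by the API server, not declared in the pod spec.
    	}
    	for _, v := range volumes {
    		fmt.Println(v.Name)
    	}
    }

Each volume then appears twice in the log: once in a VerifyControllerAttachedVolume record when the desired-state reconciler picks it up, and again in a MountVolume.SetUp succeeded record once the plugin has materialized it under the pod's volumes directory.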
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.448006 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.456871 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.457040 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.457229 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.457682 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-s6cd5"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.493065 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517579 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517667 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517699 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6868\" (UniqueName: \"kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.517817 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0"
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.531975 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"]
Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.532251 4779 kuberuntime_container.go:808] "Killing container with a grace period"
pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="dnsmasq-dns" containerID="cri-o://e91820a50f95f095c70edd8824f7a3adba40d07a91bb69fe9975d6b080147165" gracePeriod=10 Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.534736 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.581217 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.582757 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.619868 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.619927 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.619996 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6868\" (UniqueName: \"kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.620072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.620104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.620127 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.630957 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.642958 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.645157 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.658096 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.671998 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6868\" (UniqueName: \"kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.679405 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.700224 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724793 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724824 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724869 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rntq\" (UniqueName: \"kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724940 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.724966 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.771441 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.797778 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.807086 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.807188 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.810094 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827009 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827066 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827129 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rntq\" (UniqueName: \"kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827184 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.827205 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.832462 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.832960 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.834640 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.836108 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.843493 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.851144 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rntq\" (UniqueName: \"kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq\") pod \"dnsmasq-dns-5784cf869f-nx69q\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.897466 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.933864 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.934155 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.934866 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.934913 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg2bd\" (UniqueName: \"kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.934950 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.935074 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:26 crc kubenswrapper[4779]: I0929 19:26:26.935101 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036401 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036453 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036601 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036626 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg2bd\" (UniqueName: \"kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036682 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036744 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.036762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.037582 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.038651 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.040794 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.042789 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.042903 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.043191 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.047296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.066719 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg2bd\" (UniqueName: \"kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd\") pod \"cinder-api-0\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.141447 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95dtx\" (UniqueName: \"kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx\") pod \"fa13ed58-de9d-465d-aa87-9306284e4f23\" (UID: \"fa13ed58-de9d-465d-aa87-9306284e4f23\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.147087 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx" (OuterVolumeSpecName: "kube-api-access-95dtx") pod "fa13ed58-de9d-465d-aa87-9306284e4f23" (UID: "fa13ed58-de9d-465d-aa87-9306284e4f23"). InnerVolumeSpecName "kube-api-access-95dtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.209625 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.244028 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.244147 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95dtx\" (UniqueName: \"kubernetes.io/projected/fa13ed58-de9d-465d-aa87-9306284e4f23-kube-api-access-95dtx\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.318227 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerStarted","Data":"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3"} Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.332752 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-mm5l6" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.333351 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-mm5l6" event={"ID":"fa13ed58-de9d-465d-aa87-9306284e4f23","Type":"ContainerDied","Data":"296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0"} Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.333388 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="296c571f4e1fbf39bfd6ec00d77be159ef35eaac508b655035dfd93103b4c5c0" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.343491 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ths9f" event={"ID":"37527e62-544b-42e4-9223-44fe8d4106b2","Type":"ContainerDied","Data":"2d4e2d5395e7b2540a24adf0ff321ffe3dcd3b829f1b1b7ddffc6d945c4b4627"} Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.343527 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d4e2d5395e7b2540a24adf0ff321ffe3dcd3b829f1b1b7ddffc6d945c4b4627" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.346232 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txg45\" (UniqueName: \"kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45\") pod \"8590a86d-3bde-4c3d-8f3f-52de9414caa6\" (UID: \"8590a86d-3bde-4c3d-8f3f-52de9414caa6\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.360015 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45" (OuterVolumeSpecName: "kube-api-access-txg45") pod "8590a86d-3bde-4c3d-8f3f-52de9414caa6" (UID: "8590a86d-3bde-4c3d-8f3f-52de9414caa6"). InnerVolumeSpecName "kube-api-access-txg45". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.363346 4779 generic.go:334] "Generic (PLEG): container finished" podID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerID="e91820a50f95f095c70edd8824f7a3adba40d07a91bb69fe9975d6b080147165" exitCode=0 Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.363415 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" event={"ID":"bd571bff-be79-40df-b5bb-408fb8017dcc","Type":"ContainerDied","Data":"e91820a50f95f095c70edd8824f7a3adba40d07a91bb69fe9975d6b080147165"} Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.370693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gxdgx" event={"ID":"8590a86d-3bde-4c3d-8f3f-52de9414caa6","Type":"ContainerDied","Data":"98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562"} Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.370733 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98774e075d7df519942acf7b7d9c7ce5f45a1abc38f456773e5fa23c45d28562" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.370786 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gxdgx" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.451982 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txg45\" (UniqueName: \"kubernetes.io/projected/8590a86d-3bde-4c3d-8f3f-52de9414caa6-kube-api-access-txg45\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.468616 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.476803 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.555635 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.555903 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.555980 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.556059 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5xt7\" (UniqueName: \"kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7\") pod \"37527e62-544b-42e4-9223-44fe8d4106b2\" (UID: \"37527e62-544b-42e4-9223-44fe8d4106b2\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.556223 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msh7x\" (UniqueName: \"kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.556350 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.556526 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0\") pod \"bd571bff-be79-40df-b5bb-408fb8017dcc\" (UID: \"bd571bff-be79-40df-b5bb-408fb8017dcc\") " Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.589765 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7" (OuterVolumeSpecName: "kube-api-access-f5xt7") pod "37527e62-544b-42e4-9223-44fe8d4106b2" (UID: "37527e62-544b-42e4-9223-44fe8d4106b2"). InnerVolumeSpecName "kube-api-access-f5xt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.595447 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x" (OuterVolumeSpecName: "kube-api-access-msh7x") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "kube-api-access-msh7x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.648815 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.663428 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.663461 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5xt7\" (UniqueName: \"kubernetes.io/projected/37527e62-544b-42e4-9223-44fe8d4106b2-kube-api-access-f5xt7\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.663472 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msh7x\" (UniqueName: \"kubernetes.io/projected/bd571bff-be79-40df-b5bb-408fb8017dcc-kube-api-access-msh7x\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.666867 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config" (OuterVolumeSpecName: "config") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.668453 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.675288 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.734749 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.737999 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bd571bff-be79-40df-b5bb-408fb8017dcc" (UID: "bd571bff-be79-40df-b5bb-408fb8017dcc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.770712 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.770738 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.770748 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.770757 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd571bff-be79-40df-b5bb-408fb8017dcc-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.791866 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:26:27 crc kubenswrapper[4779]: I0929 19:26:27.976140 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.380535 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.382101 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-log" containerID="cri-o://84cac98c4a76840282215ff3ab3b592f6ac47af3eaa67cd43f5a9e73129b5d79" gracePeriod=30 Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.382527 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-httpd" containerID="cri-o://89810bd9c684160b3b3b03fb5ff83db2b6c73920aef64a82ff698aa35e4760b4" gracePeriod=30 Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.405717 4779 generic.go:334] "Generic (PLEG): container finished" podID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerID="ef1d492997760e0a5a2febf8aec640ad661aef8d273d66648e3ead1e77baa80a" exitCode=0 Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.405804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" event={"ID":"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703","Type":"ContainerDied","Data":"ef1d492997760e0a5a2febf8aec640ad661aef8d273d66648e3ead1e77baa80a"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.405834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" event={"ID":"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703","Type":"ContainerStarted","Data":"dfabc8ae8c47711a922efea5c1fafac05c7d962ec11901497f1ba77ae90c5a1d"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.424605 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerStarted","Data":"60f04106403839174069b26e72813f96bb3eb3c5af6fd39ba1a19995bd8e42aa"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 
19:26:28.441275 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerStarted","Data":"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.443475 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerStarted","Data":"b5653bd506dec244a616ef07ea2445d3b44a3bbcea599a170f8fdb9f0b5d88f5"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.449215 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ths9f" Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.449644 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.450193 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-nhvtq" event={"ID":"bd571bff-be79-40df-b5bb-408fb8017dcc","Type":"ContainerDied","Data":"17c8a6a7c5626f239cb5098f16d2b18fb8149e45411cb93bb1faf85db17df172"} Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.450229 4779 scope.go:117] "RemoveContainer" containerID="e91820a50f95f095c70edd8824f7a3adba40d07a91bb69fe9975d6b080147165" Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.625361 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"] Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.630929 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-nhvtq"] Sep 29 19:26:28 crc kubenswrapper[4779]: I0929 19:26:28.632563 4779 scope.go:117] "RemoveContainer" containerID="cc5ebbf7987ea603703ec0c190e0d20638d9b5a47d01e405497b16715c8491ca" Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.467307 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.487529 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" event={"ID":"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703","Type":"ContainerStarted","Data":"1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732"} Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.488872 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.490481 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerStarted","Data":"37d18db00a782ab4a30be90646e85b1eb28c893fd2ef59834eab4d14377820df"} Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.501041 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerStarted","Data":"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8"} Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.505113 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerStarted","Data":"5b019a24eedbdf593bb1bd2f0b3540272ff25740bbd4b02324c40eb7cbabbeb0"} Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.512655 4779 generic.go:334] 
"Generic (PLEG): container finished" podID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerID="84cac98c4a76840282215ff3ab3b592f6ac47af3eaa67cd43f5a9e73129b5d79" exitCode=143 Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.512723 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerDied","Data":"84cac98c4a76840282215ff3ab3b592f6ac47af3eaa67cd43f5a9e73129b5d79"} Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.517243 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" podStartSLOduration=3.517183507 podStartE2EDuration="3.517183507s" podCreationTimestamp="2025-09-29 19:26:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:29.514487004 +0000 UTC m=+1100.398912104" watchObservedRunningTime="2025-09-29 19:26:29.517183507 +0000 UTC m=+1100.401608617" Sep 29 19:26:29 crc kubenswrapper[4779]: I0929 19:26:29.783159 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" path="/var/lib/kubelet/pods/bd571bff-be79-40df-b5bb-408fb8017dcc/volumes" Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.541994 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api-log" containerID="cri-o://5b019a24eedbdf593bb1bd2f0b3540272ff25740bbd4b02324c40eb7cbabbeb0" gracePeriod=30 Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.542312 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerStarted","Data":"3c1ce1fb66ff1b227107b892d34f6e351ab88ce89e9547798f0ac843b7dd25cf"} Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.542362 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.542553 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api" containerID="cri-o://3c1ce1fb66ff1b227107b892d34f6e351ab88ce89e9547798f0ac843b7dd25cf" gracePeriod=30 Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.563447 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.563432534 podStartE2EDuration="4.563432534s" podCreationTimestamp="2025-09-29 19:26:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:30.563065814 +0000 UTC m=+1101.447490914" watchObservedRunningTime="2025-09-29 19:26:30.563432534 +0000 UTC m=+1101.447857634" Sep 29 19:26:30 crc kubenswrapper[4779]: I0929 19:26:30.954966 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:31 crc kubenswrapper[4779]: I0929 19:26:31.335445 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:31 crc kubenswrapper[4779]: I0929 19:26:31.535974 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:31 crc 
kubenswrapper[4779]: I0929 19:26:31.551055 4779 generic.go:334] "Generic (PLEG): container finished" podID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerID="3c1ce1fb66ff1b227107b892d34f6e351ab88ce89e9547798f0ac843b7dd25cf" exitCode=0 Sep 29 19:26:31 crc kubenswrapper[4779]: I0929 19:26:31.551088 4779 generic.go:334] "Generic (PLEG): container finished" podID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerID="5b019a24eedbdf593bb1bd2f0b3540272ff25740bbd4b02324c40eb7cbabbeb0" exitCode=143 Sep 29 19:26:31 crc kubenswrapper[4779]: I0929 19:26:31.551149 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerDied","Data":"3c1ce1fb66ff1b227107b892d34f6e351ab88ce89e9547798f0ac843b7dd25cf"} Sep 29 19:26:31 crc kubenswrapper[4779]: I0929 19:26:31.551209 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerDied","Data":"5b019a24eedbdf593bb1bd2f0b3540272ff25740bbd4b02324c40eb7cbabbeb0"} Sep 29 19:26:32 crc kubenswrapper[4779]: I0929 19:26:32.563614 4779 generic.go:334] "Generic (PLEG): container finished" podID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerID="89810bd9c684160b3b3b03fb5ff83db2b6c73920aef64a82ff698aa35e4760b4" exitCode=0 Sep 29 19:26:32 crc kubenswrapper[4779]: I0929 19:26:32.563866 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerDied","Data":"89810bd9c684160b3b3b03fb5ff83db2b6c73920aef64a82ff698aa35e4760b4"} Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.100810 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.105038 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-76666bfbfc-dj7qj" Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.504966 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.146:9292/healthcheck\": dial tcp 10.217.0.146:9292: connect: connection refused" Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.504970 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.146:9292/healthcheck\": dial tcp 10.217.0.146:9292: connect: connection refused" Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.730702 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6f7f5b6d48-8js86" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Sep 29 19:26:33 crc kubenswrapper[4779]: I0929 19:26:33.730806 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:26:34 crc kubenswrapper[4779]: I0929 19:26:34.381933 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:35 crc kubenswrapper[4779]: 
I0929 19:26:35.023438 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.106612 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f48875c8b-24729" Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.162221 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"] Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.162531 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-544464c4cd-mfj4x" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api-log" containerID="cri-o://af5251944fd6d520afeac767ee3873c44eb12ca258f31d70f801e11d2cf19e0f" gracePeriod=30 Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.163083 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-544464c4cd-mfj4x" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api" containerID="cri-o://cdb240ceb8eaeb2910af62209da980e8c86e14a0fc927d62cdbdbeab07081cf7" gracePeriod=30 Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.600000 4779 generic.go:334] "Generic (PLEG): container finished" podID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerID="af5251944fd6d520afeac767ee3873c44eb12ca258f31d70f801e11d2cf19e0f" exitCode=143 Sep 29 19:26:35 crc kubenswrapper[4779]: I0929 19:26:35.601021 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerDied","Data":"af5251944fd6d520afeac767ee3873c44eb12ca258f31d70f801e11d2cf19e0f"} Sep 29 19:26:36 crc kubenswrapper[4779]: I0929 19:26:36.902691 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:26:36 crc kubenswrapper[4779]: I0929 19:26:36.962403 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:36 crc kubenswrapper[4779]: I0929 19:26:36.962855 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="dnsmasq-dns" containerID="cri-o://b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908" gracePeriod=10 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.140286 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-d6758dbc9-fppqt" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.185398 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.207274 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.207507 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6656fdb884-9h5nb" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-api" containerID="cri-o://a24a8a0e0e2fdffe46fcebbbaea17fe81b2296d14936502853999c7832cc3700" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.207633 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6656fdb884-9h5nb" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-httpd" containerID="cri-o://336940a08ed36f3eff3815dcf956450178f41f48a039feddb49310c234ef6ee2" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250000 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250075 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250162 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250229 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250279 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250309 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.250370 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg2bd\" (UniqueName: \"kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd\") pod \"17ccef11-d761-408d-9d61-717ea6d7b9c3\" (UID: \"17ccef11-d761-408d-9d61-717ea6d7b9c3\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.253255 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.253658 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs" (OuterVolumeSpecName: "logs") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.257585 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.262377 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd" (OuterVolumeSpecName: "kube-api-access-rg2bd") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "kube-api-access-rg2bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.268756 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.274957 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts" (OuterVolumeSpecName: "scripts") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.307977 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351747 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351786 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351843 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351872 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351890 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351924 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.351959 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352084 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data\") pod \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\" (UID: \"81b272b7-f428-4787-b48e-3afcf7e4c8d0\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352571 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352582 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17ccef11-d761-408d-9d61-717ea6d7b9c3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352590 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352599 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg2bd\" (UniqueName: \"kubernetes.io/projected/17ccef11-d761-408d-9d61-717ea6d7b9c3-kube-api-access-rg2bd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352607 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.352615 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17ccef11-d761-408d-9d61-717ea6d7b9c3-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.353438 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.353699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs" (OuterVolumeSpecName: "logs") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.355740 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data" (OuterVolumeSpecName: "config-data") pod "17ccef11-d761-408d-9d61-717ea6d7b9c3" (UID: "17ccef11-d761-408d-9d61-717ea6d7b9c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.357967 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j" (OuterVolumeSpecName: "kube-api-access-kvf7j") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "kube-api-access-kvf7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.376989 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts" (OuterVolumeSpecName: "scripts") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.381576 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.421340 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.439823 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.440768 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data" (OuterVolumeSpecName: "config-data") pod "81b272b7-f428-4787-b48e-3afcf7e4c8d0" (UID: "81b272b7-f428-4787-b48e-3afcf7e4c8d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454696 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454720 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454729 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454739 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvf7j\" (UniqueName: \"kubernetes.io/projected/81b272b7-f428-4787-b48e-3afcf7e4c8d0-kube-api-access-kvf7j\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454749 4779 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454757 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81b272b7-f428-4787-b48e-3afcf7e4c8d0-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454780 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454790 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81b272b7-f428-4787-b48e-3afcf7e4c8d0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.454800 4779 
reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ccef11-d761-408d-9d61-717ea6d7b9c3-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.472579 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.476921 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.556232 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.556296 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.556405 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.558605 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.558678 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgv9k\" (UniqueName: \"kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.558704 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb\") pod \"7d6613f2-eca3-41c7-86c4-7c2726764f27\" (UID: \"7d6613f2-eca3-41c7-86c4-7c2726764f27\") " Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.559177 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.567057 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k" (OuterVolumeSpecName: "kube-api-access-mgv9k") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "kube-api-access-mgv9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.620226 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.620458 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.622064 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58f628df-8d11-4663-b84b-0c810edaa5fb","Type":"ContainerStarted","Data":"69e4aa04e3e4de3e6517b5ce9c83f883030ace215d44030525d76e36d344d39c"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.628791 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.629401 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81b272b7-f428-4787-b48e-3afcf7e4c8d0","Type":"ContainerDied","Data":"70bfe4939bac3b8923ef36f4c6f320830653726acda4cfb88516183ed8be3a11"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.629455 4779 scope.go:117] "RemoveContainer" containerID="89810bd9c684160b3b3b03fb5ff83db2b6c73920aef64a82ff698aa35e4760b4" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.629632 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.633748 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config" (OuterVolumeSpecName: "config") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.637711 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7d6613f2-eca3-41c7-86c4-7c2726764f27" (UID: "7d6613f2-eca3-41c7-86c4-7c2726764f27"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.658628 4779 generic.go:334] "Generic (PLEG): container finished" podID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerID="b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908" exitCode=0 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.658768 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" event={"ID":"7d6613f2-eca3-41c7-86c4-7c2726764f27","Type":"ContainerDied","Data":"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.658802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" event={"ID":"7d6613f2-eca3-41c7-86c4-7c2726764f27","Type":"ContainerDied","Data":"b044cafb324a3281fc06e771f9206aa213e5a0414e51eac1a2b1bef61c8111ec"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.658919 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-vmh6c" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663517 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663550 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663562 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgv9k\" (UniqueName: \"kubernetes.io/projected/7d6613f2-eca3-41c7-86c4-7c2726764f27-kube-api-access-mgv9k\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663594 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663606 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.663617 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d6613f2-eca3-41c7-86c4-7c2726764f27-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.665747 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.5952430079999997 podStartE2EDuration="21.665718583s" podCreationTimestamp="2025-09-29 19:26:16 +0000 UTC" firstStartedPulling="2025-09-29 19:26:17.750904943 +0000 UTC m=+1088.635330053" lastFinishedPulling="2025-09-29 19:26:36.821380528 +0000 UTC m=+1107.705805628" observedRunningTime="2025-09-29 19:26:37.645520882 +0000 UTC m=+1108.529945992" watchObservedRunningTime="2025-09-29 19:26:37.665718583 +0000 UTC m=+1108.550143683" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.672346 4779 generic.go:334] "Generic (PLEG): container finished" podID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" 
containerID="336940a08ed36f3eff3815dcf956450178f41f48a039feddb49310c234ef6ee2" exitCode=0 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.672472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerDied","Data":"336940a08ed36f3eff3815dcf956450178f41f48a039feddb49310c234ef6ee2"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.706642 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerStarted","Data":"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.706837 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-central-agent" containerID="cri-o://e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.707122 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.707483 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="proxy-httpd" containerID="cri-o://08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.707541 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="sg-core" containerID="cri-o://a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.707582 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-notification-agent" containerID="cri-o://0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d" gracePeriod=30 Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.726257 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"17ccef11-d761-408d-9d61-717ea6d7b9c3","Type":"ContainerDied","Data":"b5653bd506dec244a616ef07ea2445d3b44a3bbcea599a170f8fdb9f0b5d88f5"} Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.726280 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.727369 4779 scope.go:117] "RemoveContainer" containerID="84cac98c4a76840282215ff3ab3b592f6ac47af3eaa67cd43f5a9e73129b5d79" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.770627 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.790649 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-vmh6c"] Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.791340 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.165360773 podStartE2EDuration="12.791300721s" podCreationTimestamp="2025-09-29 19:26:25 +0000 UTC" firstStartedPulling="2025-09-29 19:26:26.198077692 +0000 UTC m=+1097.082502792" lastFinishedPulling="2025-09-29 19:26:36.82401764 +0000 UTC m=+1107.708442740" observedRunningTime="2025-09-29 19:26:37.743104225 +0000 UTC m=+1108.627529315" watchObservedRunningTime="2025-09-29 19:26:37.791300721 +0000 UTC m=+1108.675725821" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.927298 4779 scope.go:117] "RemoveContainer" containerID="b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.977823 4779 scope.go:117] "RemoveContainer" containerID="8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0" Sep 29 19:26:37 crc kubenswrapper[4779]: I0929 19:26:37.982457 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.005189 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.016274 4779 scope.go:117] "RemoveContainer" containerID="b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.017967 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908\": container with ID starting with b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908 not found: ID does not exist" containerID="b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.017997 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908"} err="failed to get container status \"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908\": rpc error: code = NotFound desc = could not find container \"b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908\": container with ID starting with b79426598752e61c80296293f7cb5311a69cc052b027318304a4d7dd73d32908 not found: ID does not exist" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.018041 4779 scope.go:117] "RemoveContainer" containerID="8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.018355 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0\": container with ID starting with 
8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0 not found: ID does not exist" containerID="8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.018376 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0"} err="failed to get container status \"8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0\": rpc error: code = NotFound desc = could not find container \"8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0\": container with ID starting with 8ba250f01f3609f9e9c585a55b2e7c0ff35f0f61c7d628a8eb36f4e8e172ecd0 not found: ID does not exist" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.018388 4779 scope.go:117] "RemoveContainer" containerID="3c1ce1fb66ff1b227107b892d34f6e351ab88ce89e9547798f0ac843b7dd25cf" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.032121 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.037537 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.039368 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d6613f2_eca3_41c7_86c4_7c2726764f27.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d6613f2_eca3_41c7_86c4_7c2726764f27.slice/crio-b044cafb324a3281fc06e771f9206aa213e5a0414e51eac1a2b1bef61c8111ec\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00a7767e_dcd8_4e64_97b0_6d5bf4ec280e.slice/crio-conmon-a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.047788 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048284 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="init" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048307 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="init" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048403 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-httpd" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048413 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-httpd" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048426 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048433 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048443 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
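[annotation] The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" exchanges above are benign: by the time the kubelet retries deletion, CRI-O has already removed the container, and a NotFound status is effectively the desired end state. A minimal sketch of that idempotent-cleanup pattern follows; the runtimeService interface and removeIfPresent helper are hypothetical stand-ins, not kubelet's actual code, and only the gRPC status/codes API is real.

    package kubeletutil

    import (
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // runtimeService is a hypothetical stand-in for a CRI runtime client.
    type runtimeService interface {
        RemoveContainer(id string) error
    }

    // removeIfPresent makes deletion idempotent: NotFound from the runtime
    // means the container is already gone, mirroring the tolerance in the log.
    func removeIfPresent(rs runtimeService, id string) error {
        if err := rs.RemoveContainer(id); err != nil {
            if status.Code(err) == codes.NotFound {
                return nil // already removed; nothing left to do
            }
            return err
        }
        return nil
    }
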
podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048450 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048466 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-log" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048473 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-log" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048487 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa13ed58-de9d-465d-aa87-9306284e4f23" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048495 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa13ed58-de9d-465d-aa87-9306284e4f23" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048508 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37527e62-544b-42e4-9223-44fe8d4106b2" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048515 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="37527e62-544b-42e4-9223-44fe8d4106b2" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048527 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8590a86d-3bde-4c3d-8f3f-52de9414caa6" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048534 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8590a86d-3bde-4c3d-8f3f-52de9414caa6" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048555 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="init" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048563 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="init" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048588 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api-log" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048595 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api-log" Sep 29 19:26:38 crc kubenswrapper[4779]: E0929 19:26:38.048610 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048619 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048851 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd571bff-be79-40df-b5bb-408fb8017dcc" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048863 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" containerName="dnsmasq-dns" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048883 4779 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048896 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8590a86d-3bde-4c3d-8f3f-52de9414caa6" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048909 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-log" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048926 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa13ed58-de9d-465d-aa87-9306284e4f23" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048933 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="37527e62-544b-42e4-9223-44fe8d4106b2" containerName="mariadb-database-create" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048947 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" containerName="glance-httpd" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.048956 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" containerName="cinder-api-log" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.050051 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.053131 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.053222 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.053364 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.057289 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.058733 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.061417 4779 scope.go:117] "RemoveContainer" containerID="5b019a24eedbdf593bb1bd2f0b3540272ff25740bbd4b02324c40eb7cbabbeb0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.061869 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.064763 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.067357 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.072607 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175053 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175216 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-logs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175238 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data-custom\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175381 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175419 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-logs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175464 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175484 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175517 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175535 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdzc7\" (UniqueName: \"kubernetes.io/projected/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-kube-api-access-cdzc7\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-scripts\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175629 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175768 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175797 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n522l\" (UniqueName: \"kubernetes.io/projected/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-kube-api-access-n522l\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175838 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175896 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.175981 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277282 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-scripts\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277581 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277650 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277673 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n522l\" (UniqueName: \"kubernetes.io/projected/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-kube-api-access-n522l\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277696 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277713 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277742 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277770 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277794 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277822 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-logs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data-custom\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277865 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277879 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-logs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277897 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277914 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277934 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.277950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdzc7\" (UniqueName: \"kubernetes.io/projected/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-kube-api-access-cdzc7\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.278868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc 
kubenswrapper[4779]: I0929 19:26:38.279033 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.279227 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-logs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.283429 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-logs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.285938 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.286611 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.286790 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data-custom\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.291844 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.292166 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-scripts\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.295460 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.295894 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.296484 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.300925 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n522l\" (UniqueName: \"kubernetes.io/projected/880c493a-a9b5-4cdc-a4b1-256feeee3e1b-kube-api-access-n522l\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.301722 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdzc7\" (UniqueName: \"kubernetes.io/projected/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-kube-api-access-cdzc7\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.303475 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-config-data\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.304595 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.312017 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e063f6a5-dcd3-413d-bb65-e9ceeca73df0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e063f6a5-dcd3-413d-bb65-e9ceeca73df0\") " pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.339654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"880c493a-a9b5-4cdc-a4b1-256feeee3e1b\") " pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.349691 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.349929 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-log" containerID="cri-o://a5bfae5491dc68369f3b0bb4f5458e92356ba49c7e40f4ad60f3cf763e8fed7a" gracePeriod=30 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.350373 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-httpd" containerID="cri-o://efe227d05a85321bfef2305a94bd917d7139d78388dcc95dc3a82ee9ddc1be7c" gracePeriod=30 Sep 29 19:26:38 crc 
kubenswrapper[4779]: I0929 19:26:38.370849 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.386122 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.739325 4779 generic.go:334] "Generic (PLEG): container finished" podID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerID="cdb240ceb8eaeb2910af62209da980e8c86e14a0fc927d62cdbdbeab07081cf7" exitCode=0 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.739627 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerDied","Data":"cdb240ceb8eaeb2910af62209da980e8c86e14a0fc927d62cdbdbeab07081cf7"} Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.743515 4779 generic.go:334] "Generic (PLEG): container finished" podID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerID="a5bfae5491dc68369f3b0bb4f5458e92356ba49c7e40f4ad60f3cf763e8fed7a" exitCode=143 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.743613 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerDied","Data":"a5bfae5491dc68369f3b0bb4f5458e92356ba49c7e40f4ad60f3cf763e8fed7a"} Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.746666 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerStarted","Data":"081fd9057c47d5c2cc052e736aefd00f28bb925c0f3ccc98b11a7ad45b66744f"} Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773040 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=12.005869075 podStartE2EDuration="12.773021486s" podCreationTimestamp="2025-09-29 19:26:26 +0000 UTC" firstStartedPulling="2025-09-29 19:26:27.714669684 +0000 UTC m=+1098.599094784" lastFinishedPulling="2025-09-29 19:26:28.481822095 +0000 UTC m=+1099.366247195" observedRunningTime="2025-09-29 19:26:38.763709441 +0000 UTC m=+1109.648134541" watchObservedRunningTime="2025-09-29 19:26:38.773021486 +0000 UTC m=+1109.657446586" Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773535 4779 generic.go:334] "Generic (PLEG): container finished" podID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerID="08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99" exitCode=0 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773560 4779 generic.go:334] "Generic (PLEG): container finished" podID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerID="a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8" exitCode=2 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773566 4779 generic.go:334] "Generic (PLEG): container finished" podID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerID="e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3" exitCode=0 Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773613 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerDied","Data":"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99"} Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773638 4779 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerDied","Data":"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8"} Sep 29 19:26:38 crc kubenswrapper[4779]: I0929 19:26:38.773650 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerDied","Data":"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.069706 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.087459 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.245233 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.315050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle\") pod \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.315127 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs\") pod \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.315149 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom\") pod \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.315195 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsqk4\" (UniqueName: \"kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4\") pod \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.315296 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data\") pod \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\" (UID: \"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.316991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs" (OuterVolumeSpecName: "logs") pod "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" (UID: "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.324483 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" (UID: "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b"). 
InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.329708 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4" (OuterVolumeSpecName: "kube-api-access-fsqk4") pod "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" (UID: "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b"). InnerVolumeSpecName "kube-api-access-fsqk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.380423 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" (UID: "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.403400 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data" (OuterVolumeSpecName: "config-data") pod "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" (UID: "c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.418375 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.418398 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.418409 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsqk4\" (UniqueName: \"kubernetes.io/projected/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-kube-api-access-fsqk4\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.418417 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.418427 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.446108 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.456622 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519020 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519069 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519125 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519176 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519462 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519541 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519579 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519637 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519667 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519696 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsd8l\" (UniqueName: \"kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: 
\"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519724 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519760 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519785 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle\") pod \"442ef8d4-8019-432f-8715-9b2a5aaaa022\" (UID: \"442ef8d4-8019-432f-8715-9b2a5aaaa022\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.519807 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs\") pod \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\" (UID: \"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e\") " Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.521662 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs" (OuterVolumeSpecName: "logs") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.525863 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.528471 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.535522 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.536484 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l" (OuterVolumeSpecName: "kube-api-access-gsd8l") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). 
InnerVolumeSpecName "kube-api-access-gsd8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.536539 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs" (OuterVolumeSpecName: "kube-api-access-r77fs") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "kube-api-access-r77fs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.547439 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts" (OuterVolumeSpecName: "scripts") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.571371 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data" (OuterVolumeSpecName: "config-data") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.582083 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts" (OuterVolumeSpecName: "scripts") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.585424 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.594686 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.602708 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "442ef8d4-8019-432f-8715-9b2a5aaaa022" (UID: "442ef8d4-8019-432f-8715-9b2a5aaaa022"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622169 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622201 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622211 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsd8l\" (UniqueName: \"kubernetes.io/projected/442ef8d4-8019-432f-8715-9b2a5aaaa022-kube-api-access-gsd8l\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622220 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622248 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622257 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622265 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r77fs\" (UniqueName: \"kubernetes.io/projected/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-kube-api-access-r77fs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622273 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622281 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/442ef8d4-8019-432f-8715-9b2a5aaaa022-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622289 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/442ef8d4-8019-432f-8715-9b2a5aaaa022-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622296 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.622304 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/442ef8d4-8019-432f-8715-9b2a5aaaa022-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.629889 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: 
"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.640393 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data" (OuterVolumeSpecName: "config-data") pod "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" (UID: "00a7767e-dcd8-4e64-97b0-6d5bf4ec280e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.723429 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.723469 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.779188 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17ccef11-d761-408d-9d61-717ea6d7b9c3" path="/var/lib/kubelet/pods/17ccef11-d761-408d-9d61-717ea6d7b9c3/volumes" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.780453 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6613f2-eca3-41c7-86c4-7c2726764f27" path="/var/lib/kubelet/pods/7d6613f2-eca3-41c7-86c4-7c2726764f27/volumes" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.782064 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81b272b7-f428-4787-b48e-3afcf7e4c8d0" path="/var/lib/kubelet/pods/81b272b7-f428-4787-b48e-3afcf7e4c8d0/volumes" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.819424 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-544464c4cd-mfj4x" event={"ID":"c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b","Type":"ContainerDied","Data":"579b2152d0212dc8e8e9af5cd495d0bd28e6751190dc8d7d11da3aba1f071076"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.819480 4779 scope.go:117] "RemoveContainer" containerID="cdb240ceb8eaeb2910af62209da980e8c86e14a0fc927d62cdbdbeab07081cf7" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.819546 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-544464c4cd-mfj4x" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.821109 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"880c493a-a9b5-4cdc-a4b1-256feeee3e1b","Type":"ContainerStarted","Data":"e940454329298e35d1876d3aa9df61875e7f2dc4cc0c75ff572fefb1896a308a"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.823470 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e063f6a5-dcd3-413d-bb65-e9ceeca73df0","Type":"ContainerStarted","Data":"56f1a7b80837ee0c82d49a6b50328a90c73b3462d04ef778215ed88488edb681"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.831898 4779 generic.go:334] "Generic (PLEG): container finished" podID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerID="193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1" exitCode=137 Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.831946 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f7f5b6d48-8js86" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.831985 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerDied","Data":"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.832009 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f7f5b6d48-8js86" event={"ID":"442ef8d4-8019-432f-8715-9b2a5aaaa022","Type":"ContainerDied","Data":"411d397596b00ed8248c8e9e33024f632fc074c50d520f1bc0b2dfd34db9dc6f"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.834994 4779 generic.go:334] "Generic (PLEG): container finished" podID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerID="0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d" exitCode=0 Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.835046 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.835094 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerDied","Data":"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.835115 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00a7767e-dcd8-4e64-97b0-6d5bf4ec280e","Type":"ContainerDied","Data":"885f48ef1cf6dc7e0f37cec57a345bc72511344cc1f5dbd516586fde97ecd7c4"} Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.849824 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.858138 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-544464c4cd-mfj4x"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.862389 4779 scope.go:117] "RemoveContainer" containerID="af5251944fd6d520afeac767ee3873c44eb12ca258f31d70f801e11d2cf19e0f" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.889780 4779 scope.go:117] "RemoveContainer" containerID="a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.889936 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.893206 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6f7f5b6d48-8js86"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.915471 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.922571 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932392 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932828 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932848 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api" Sep 29 
19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932868 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon-log" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932875 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon-log" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932894 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="sg-core" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932903 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="sg-core" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932914 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="proxy-httpd" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932921 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="proxy-httpd" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932935 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-central-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932943 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-central-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932960 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932967 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.932981 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api-log" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.932988 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api-log" Sep 29 19:26:39 crc kubenswrapper[4779]: E0929 19:26:39.933000 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-notification-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933010 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-notification-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933166 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api-log" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933175 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-central-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933185 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="proxy-httpd" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933200 4779 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon-log" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933210 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="ceilometer-notification-agent" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933219 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" containerName="sg-core" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933233 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" containerName="barbican-api" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.933240 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" containerName="horizon" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.938247 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.942494 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.952043 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:26:39 crc kubenswrapper[4779]: I0929 19:26:39.952041 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027036 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027135 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027158 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027211 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62rh5\" (UniqueName: \"kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027301 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027667 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.027783 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.069210 4779 scope.go:117] "RemoveContainer" containerID="193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129727 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62rh5\" (UniqueName: \"kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129780 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129841 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129871 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129932 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.129950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.132524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " 
pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.132777 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.138902 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.139267 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.139290 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.140765 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.147768 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62rh5\" (UniqueName: \"kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5\") pod \"ceilometer-0\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.252398 4779 scope.go:117] "RemoveContainer" containerID="a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece" Sep 29 19:26:40 crc kubenswrapper[4779]: E0929 19:26:40.253000 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece\": container with ID starting with a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece not found: ID does not exist" containerID="a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.253042 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece"} err="failed to get container status \"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece\": rpc error: code = NotFound desc = could not find container \"a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece\": container with ID starting with a7358d5a6b25fe588711b862903a2db92f9bbab4cc48eab5bf60c35eecf79ece not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.253067 4779 scope.go:117] "RemoveContainer" containerID="193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1" Sep 29 19:26:40 crc 
kubenswrapper[4779]: E0929 19:26:40.253779 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1\": container with ID starting with 193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1 not found: ID does not exist" containerID="193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.253829 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1"} err="failed to get container status \"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1\": rpc error: code = NotFound desc = could not find container \"193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1\": container with ID starting with 193924d7b40387c777023562b1dc745c8cabd5533a384e043e26a919e41b5bb1 not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.253859 4779 scope.go:117] "RemoveContainer" containerID="08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.272877 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.279376 4779 scope.go:117] "RemoveContainer" containerID="a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.311062 4779 scope.go:117] "RemoveContainer" containerID="0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.383227 4779 scope.go:117] "RemoveContainer" containerID="e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.426348 4779 scope.go:117] "RemoveContainer" containerID="08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99" Sep 29 19:26:40 crc kubenswrapper[4779]: E0929 19:26:40.428333 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99\": container with ID starting with 08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99 not found: ID does not exist" containerID="08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.428371 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99"} err="failed to get container status \"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99\": rpc error: code = NotFound desc = could not find container \"08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99\": container with ID starting with 08cce7d93838bd1823e591ae7784a1a7ee1d2a92bffe784154d7706397159c99 not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.428399 4779 scope.go:117] "RemoveContainer" containerID="a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8" Sep 29 19:26:40 crc kubenswrapper[4779]: E0929 19:26:40.428926 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8\": container with ID starting with a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8 not found: ID does not exist" containerID="a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.428964 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8"} err="failed to get container status \"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8\": rpc error: code = NotFound desc = could not find container \"a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8\": container with ID starting with a9b8a0fd259cbd970917d192eb68c61afc4e21a0eb7d45ff714b4a84b9278dc8 not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.428998 4779 scope.go:117] "RemoveContainer" containerID="0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d" Sep 29 19:26:40 crc kubenswrapper[4779]: E0929 19:26:40.429310 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d\": container with ID starting with 0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d not found: ID does not exist" containerID="0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.429391 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d"} err="failed to get container status \"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d\": rpc error: code = NotFound desc = could not find container \"0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d\": container with ID starting with 0ef60d95f076d975cbaa63b5588150976ee184cdf1da3378acf55aa6ef20a00d not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.429406 4779 scope.go:117] "RemoveContainer" containerID="e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3" Sep 29 19:26:40 crc kubenswrapper[4779]: E0929 19:26:40.429840 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3\": container with ID starting with e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3 not found: ID does not exist" containerID="e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.429856 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3"} err="failed to get container status \"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3\": rpc error: code = NotFound desc = could not find container \"e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3\": container with ID starting with e1e73527fa5092322476e44a18898e4bad17d9da954b7048c3f8440d139315f3 not found: ID does not exist" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.866825 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"880c493a-a9b5-4cdc-a4b1-256feeee3e1b","Type":"ContainerStarted","Data":"cd603aea57bf80509d35b510c976f27e54da46c552c0f81fa98ce9dc34fa23c6"} Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.870444 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e063f6a5-dcd3-413d-bb65-e9ceeca73df0","Type":"ContainerStarted","Data":"79e60370340cf1f66e72f77ce46db3a3c4af412071b5f5d00a458ec97d60b791"} Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.870489 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e063f6a5-dcd3-413d-bb65-e9ceeca73df0","Type":"ContainerStarted","Data":"473afa980e4990a7e66210bc0510ea9c691846b62a6f58042a6cd67c30306b4a"} Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.871271 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.884548 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.895891 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.895873375 podStartE2EDuration="3.895873375s" podCreationTimestamp="2025-09-29 19:26:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:40.892693488 +0000 UTC m=+1111.777118578" watchObservedRunningTime="2025-09-29 19:26:40.895873375 +0000 UTC m=+1111.780298475" Sep 29 19:26:40 crc kubenswrapper[4779]: I0929 19:26:40.926859 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.92684514 podStartE2EDuration="3.92684514s" podCreationTimestamp="2025-09-29 19:26:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:40.923769876 +0000 UTC m=+1111.808194976" watchObservedRunningTime="2025-09-29 19:26:40.92684514 +0000 UTC m=+1111.811270240" Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.807076 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a7767e-dcd8-4e64-97b0-6d5bf4ec280e" path="/var/lib/kubelet/pods/00a7767e-dcd8-4e64-97b0-6d5bf4ec280e/volumes" Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.808512 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="442ef8d4-8019-432f-8715-9b2a5aaaa022" path="/var/lib/kubelet/pods/442ef8d4-8019-432f-8715-9b2a5aaaa022/volumes" Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.810004 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b" path="/var/lib/kubelet/pods/c5fc66bc-61c8-4f4c-838a-0ff89fbb9b1b/volumes" Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.810575 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.879730 4779 generic.go:334] "Generic (PLEG): container finished" podID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerID="a24a8a0e0e2fdffe46fcebbbaea17fe81b2296d14936502853999c7832cc3700" exitCode=0 Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.879854 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" 
event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerDied","Data":"a24a8a0e0e2fdffe46fcebbbaea17fe81b2296d14936502853999c7832cc3700"} Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.881229 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerStarted","Data":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.881268 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerStarted","Data":"17bf5115f8dce94b4499a7453a0d3ea1827948414610c0a8a14715626b031d7d"} Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.886518 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"880c493a-a9b5-4cdc-a4b1-256feeee3e1b","Type":"ContainerStarted","Data":"8b10bf33c7ba1be0edb36aa43bb483a94b9b04497ce5e9c7389103aa6483cc6f"} Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.889772 4779 generic.go:334] "Generic (PLEG): container finished" podID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerID="efe227d05a85321bfef2305a94bd917d7139d78388dcc95dc3a82ee9ddc1be7c" exitCode=0 Sep 29 19:26:41 crc kubenswrapper[4779]: I0929 19:26:41.889860 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerDied","Data":"efe227d05a85321bfef2305a94bd917d7139d78388dcc95dc3a82ee9ddc1be7c"} Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.088098 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.088846 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.095844 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.127840 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.171919 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5f16-account-create-m6x8h"] Sep 29 19:26:42 crc kubenswrapper[4779]: E0929 19:26:42.172751 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-log" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.172821 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-log" Sep 29 19:26:42 crc kubenswrapper[4779]: E0929 19:26:42.172913 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-api" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.172965 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-api" Sep 29 19:26:42 crc kubenswrapper[4779]: E0929 19:26:42.173033 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173087 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: E0929 19:26:42.173140 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173199 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173437 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-api" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173496 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173573 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" containerName="neutron-httpd" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.173636 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" containerName="glance-log" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.174279 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.177834 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199308 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4zqn\" (UniqueName: \"kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn\") pod \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199400 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199432 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199489 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle\") pod \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199517 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199539 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199556 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199575 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz46f\" (UniqueName: \"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199592 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs\") pod \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199629 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config\") pod \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199680 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data\") pod \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\" (UID: \"be202ce5-0468-47b9-94bd-4bf15e9ad65b\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.199856 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config\") pod \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\" (UID: \"64e12e10-e0f1-4706-b0a2-ec78ce9921a1\") " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.202036 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f16-account-create-m6x8h"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.205710 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts" (OuterVolumeSpecName: "scripts") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.215121 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.215359 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs" (OuterVolumeSpecName: "logs") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.218122 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.220206 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn" (OuterVolumeSpecName: "kube-api-access-b4zqn") pod "64e12e10-e0f1-4706-b0a2-ec78ce9921a1" (UID: "64e12e10-e0f1-4706-b0a2-ec78ce9921a1"). InnerVolumeSpecName "kube-api-access-b4zqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.243263 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f" (OuterVolumeSpecName: "kube-api-access-sz46f") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "kube-api-access-sz46f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.266158 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "64e12e10-e0f1-4706-b0a2-ec78ce9921a1" (UID: "64e12e10-e0f1-4706-b0a2-ec78ce9921a1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.297971 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302507 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnhsd\" (UniqueName: \"kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd\") pod \"nova-api-5f16-account-create-m6x8h\" (UID: \"511539e5-fd9d-4376-ae75-4c308f748af9\") " pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302581 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302593 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302603 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4zqn\" (UniqueName: \"kubernetes.io/projected/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-kube-api-access-b4zqn\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302612 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302621 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be202ce5-0468-47b9-94bd-4bf15e9ad65b-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302628 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302648 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume 
\"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.302660 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz46f\" (UniqueName: \"kubernetes.io/projected/be202ce5-0468-47b9-94bd-4bf15e9ad65b-kube-api-access-sz46f\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.311828 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.323126 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64e12e10-e0f1-4706-b0a2-ec78ce9921a1" (UID: "64e12e10-e0f1-4706-b0a2-ec78ce9921a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.332407 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data" (OuterVolumeSpecName: "config-data") pod "be202ce5-0468-47b9-94bd-4bf15e9ad65b" (UID: "be202ce5-0468-47b9-94bd-4bf15e9ad65b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.345575 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "64e12e10-e0f1-4706-b0a2-ec78ce9921a1" (UID: "64e12e10-e0f1-4706-b0a2-ec78ce9921a1"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.358890 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.364251 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ed29-account-create-p2lzw"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.365616 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.369792 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.371551 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config" (OuterVolumeSpecName: "config") pod "64e12e10-e0f1-4706-b0a2-ec78ce9921a1" (UID: "64e12e10-e0f1-4706-b0a2-ec78ce9921a1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.375107 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ed29-account-create-p2lzw"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.404485 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnhsd\" (UniqueName: \"kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd\") pod \"nova-api-5f16-account-create-m6x8h\" (UID: \"511539e5-fd9d-4376-ae75-4c308f748af9\") " pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.404596 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2288\" (UniqueName: \"kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288\") pod \"nova-cell0-ed29-account-create-p2lzw\" (UID: \"affb28d4-02a0-4da0-9160-14b003815d10\") " pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405221 4779 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405242 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405263 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405273 4779 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405284 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/64e12e10-e0f1-4706-b0a2-ec78ce9921a1-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.405296 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be202ce5-0468-47b9-94bd-4bf15e9ad65b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.426924 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnhsd\" (UniqueName: \"kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd\") pod \"nova-api-5f16-account-create-m6x8h\" (UID: \"511539e5-fd9d-4376-ae75-4c308f748af9\") " pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.507354 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2288\" (UniqueName: \"kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288\") pod \"nova-cell0-ed29-account-create-p2lzw\" (UID: \"affb28d4-02a0-4da0-9160-14b003815d10\") " pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.521837 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2288\" (UniqueName: \"kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288\") pod \"nova-cell0-ed29-account-create-p2lzw\" (UID: \"affb28d4-02a0-4da0-9160-14b003815d10\") " pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.544277 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.562665 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c17f-account-create-pmqwr"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.563833 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.565891 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.583027 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c17f-account-create-pmqwr"] Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.609143 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhlwm\" (UniqueName: \"kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm\") pod \"nova-cell1-c17f-account-create-pmqwr\" (UID: \"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083\") " pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.699474 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.713778 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhlwm\" (UniqueName: \"kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm\") pod \"nova-cell1-c17f-account-create-pmqwr\" (UID: \"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083\") " pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.737943 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhlwm\" (UniqueName: \"kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm\") pod \"nova-cell1-c17f-account-create-pmqwr\" (UID: \"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083\") " pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.892776 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.903950 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6656fdb884-9h5nb" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.903964 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6656fdb884-9h5nb" event={"ID":"64e12e10-e0f1-4706-b0a2-ec78ce9921a1","Type":"ContainerDied","Data":"c4ccda0b04cb30f618b4d86978b3ec9f047667eeb74286a77090bdfc5fb03521"} Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.904028 4779 scope.go:117] "RemoveContainer" containerID="336940a08ed36f3eff3815dcf956450178f41f48a039feddb49310c234ef6ee2" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.930230 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerStarted","Data":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.937023 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"be202ce5-0468-47b9-94bd-4bf15e9ad65b","Type":"ContainerDied","Data":"53fa5a67caa0ea269793cad27d822aa72b62e50c7a5a2e12eb8fe2ece55e4d64"} Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.937164 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.937349 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="cinder-scheduler" containerID="cri-o://37d18db00a782ab4a30be90646e85b1eb28c893fd2ef59834eab4d14377820df" gracePeriod=30 Sep 29 19:26:42 crc kubenswrapper[4779]: I0929 19:26:42.938845 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="probe" containerID="cri-o://081fd9057c47d5c2cc052e736aefd00f28bb925c0f3ccc98b11a7ad45b66744f" gracePeriod=30 Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.008331 4779 scope.go:117] "RemoveContainer" containerID="a24a8a0e0e2fdffe46fcebbbaea17fe81b2296d14936502853999c7832cc3700" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.028266 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f16-account-create-m6x8h"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.077497 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.088544 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6656fdb884-9h5nb"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.089500 4779 scope.go:117] "RemoveContainer" containerID="efe227d05a85321bfef2305a94bd917d7139d78388dcc95dc3a82ee9ddc1be7c" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.103377 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.117383 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.126967 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.130061 4779 util.go:30] "No sandbox for pod can be found. 
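
Note the SyncLoop DELETE, REMOVE, ADD triple for openstack/glance-default-external-api-0 just above: the API object is deleted, kubelet forgets the pod, and a replacement is admitted under a new UID (fed19b9e-... replacing be202ce5-...), which is why its whole volume graph gets rebuilt below. A small sketch in the same spirit as the previous one (Python 3, stdlib; the regex is tied to the single-pod pods=[...] form seen in this log) that folds the SyncLoop lines into a per-pod operation sequence:

import collections
import re
import sys

# Matches e.g.: "SyncLoop DELETE" source="api" pods=["openstack/neutron-..."]
SYNC = re.compile(r'"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\["([^"]+)"\]')

ops = collections.defaultdict(list)
for line in sys.stdin:
    m = SYNC.search(line)
    if m:
        ops[m.group(2)].append(m.group(1))

for pod, seq in sorted(ops.items()):
    print(pod, '->', ' '.join(seq))

For the glance pod this section yields a trailing ... DELETE REMOVE ADD UPDATE, the recreate pattern; a pod that merely restarts containers shows no REMOVE.
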
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.132412 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.132548 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.132719 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.173975 4779 scope.go:117] "RemoveContainer" containerID="a5bfae5491dc68369f3b0bb4f5458e92356ba49c7e40f4ad60f3cf763e8fed7a" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.196135 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ed29-account-create-p2lzw"] Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232595 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-config-data\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232623 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-scripts\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232655 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232679 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcqhq\" (UniqueName: \"kubernetes.io/projected/fed19b9e-ec0b-4944-a98c-885ec4862d48-kube-api-access-pcqhq\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232821 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-logs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.232986 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.233026 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.233333 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335148 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-config-data\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335195 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-scripts\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335253 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcqhq\" (UniqueName: \"kubernetes.io/projected/fed19b9e-ec0b-4944-a98c-885ec4862d48-kube-api-access-pcqhq\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335290 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-logs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335357 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335378 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335412 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.335838 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.336010 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.336149 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fed19b9e-ec0b-4944-a98c-885ec4862d48-logs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.342778 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-scripts\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.344883 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.347192 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-config-data\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.347782 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fed19b9e-ec0b-4944-a98c-885ec4862d48-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.355145 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcqhq\" (UniqueName: \"kubernetes.io/projected/fed19b9e-ec0b-4944-a98c-885ec4862d48-kube-api-access-pcqhq\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.371641 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"fed19b9e-ec0b-4944-a98c-885ec4862d48\") " pod="openstack/glance-default-external-api-0" Sep 29 
19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.420146 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c17f-account-create-pmqwr"] Sep 29 19:26:43 crc kubenswrapper[4779]: W0929 19:26:43.420409 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e31e631_57a3_4b9b_8d0a_4a2e3cf7d083.slice/crio-b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af WatchSource:0}: Error finding container b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af: Status 404 returned error can't find the container with id b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.459861 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.787456 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e12e10-e0f1-4706-b0a2-ec78ce9921a1" path="/var/lib/kubelet/pods/64e12e10-e0f1-4706-b0a2-ec78ce9921a1/volumes" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.788611 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be202ce5-0468-47b9-94bd-4bf15e9ad65b" path="/var/lib/kubelet/pods/be202ce5-0468-47b9-94bd-4bf15e9ad65b/volumes" Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.948752 4779 generic.go:334] "Generic (PLEG): container finished" podID="92130510-fe09-43ca-af98-8242adf45dc4" containerID="081fd9057c47d5c2cc052e736aefd00f28bb925c0f3ccc98b11a7ad45b66744f" exitCode=0 Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.948814 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerDied","Data":"081fd9057c47d5c2cc052e736aefd00f28bb925c0f3ccc98b11a7ad45b66744f"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.950737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerStarted","Data":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.964095 4779 generic.go:334] "Generic (PLEG): container finished" podID="511539e5-fd9d-4376-ae75-4c308f748af9" containerID="db998984beabddce9c56c41f4f1a92332da7d7b9924b4539e86e4742360f18aa" exitCode=0 Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.964171 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f16-account-create-m6x8h" event={"ID":"511539e5-fd9d-4376-ae75-4c308f748af9","Type":"ContainerDied","Data":"db998984beabddce9c56c41f4f1a92332da7d7b9924b4539e86e4742360f18aa"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.964195 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f16-account-create-m6x8h" event={"ID":"511539e5-fd9d-4376-ae75-4c308f748af9","Type":"ContainerStarted","Data":"c79449c46300dc1886d8ee81aaa8a3b75693c15e992e745ff86c058aad4389f1"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.975755 4779 generic.go:334] "Generic (PLEG): container finished" podID="affb28d4-02a0-4da0-9160-14b003815d10" containerID="35f5194d8ae85935a5ffc719458a06390a2604ef60d79e106f9d12797b4e0146" exitCode=0 Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.975836 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-ed29-account-create-p2lzw" event={"ID":"affb28d4-02a0-4da0-9160-14b003815d10","Type":"ContainerDied","Data":"35f5194d8ae85935a5ffc719458a06390a2604ef60d79e106f9d12797b4e0146"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.975857 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed29-account-create-p2lzw" event={"ID":"affb28d4-02a0-4da0-9160-14b003815d10","Type":"ContainerStarted","Data":"ef3d43fdbf59558899e4d5faaafa0e1f09b57d923e126c6d6e8bbc8bf13c588c"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.984858 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" containerID="add0df3627e6f15e5041d51186e821afb4f3aa74891ee6298afb7b80dcf5c837" exitCode=0 Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.985034 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c17f-account-create-pmqwr" event={"ID":"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083","Type":"ContainerDied","Data":"add0df3627e6f15e5041d51186e821afb4f3aa74891ee6298afb7b80dcf5c837"} Sep 29 19:26:43 crc kubenswrapper[4779]: I0929 19:26:43.985210 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c17f-account-create-pmqwr" event={"ID":"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083","Type":"ContainerStarted","Data":"b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af"} Sep 29 19:26:44 crc kubenswrapper[4779]: I0929 19:26:44.076720 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.005092 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fed19b9e-ec0b-4944-a98c-885ec4862d48","Type":"ContainerStarted","Data":"439e0e1b6c944a8e5de457cf5e4f5090c93ab44dfdb6c90018fdf5643d545636"} Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.005889 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fed19b9e-ec0b-4944-a98c-885ec4862d48","Type":"ContainerStarted","Data":"0f8d06f32309a0ade4a4cbe913ae0c696aa8b979f19693e3d6db6e69a22ab370"} Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.424248 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.452108 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.474943 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhlwm\" (UniqueName: \"kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm\") pod \"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083\" (UID: \"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083\") " Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.475200 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2288\" (UniqueName: \"kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288\") pod \"affb28d4-02a0-4da0-9160-14b003815d10\" (UID: \"affb28d4-02a0-4da0-9160-14b003815d10\") " Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.481431 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288" (OuterVolumeSpecName: "kube-api-access-m2288") pod "affb28d4-02a0-4da0-9160-14b003815d10" (UID: "affb28d4-02a0-4da0-9160-14b003815d10"). InnerVolumeSpecName "kube-api-access-m2288". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.488218 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm" (OuterVolumeSpecName: "kube-api-access-lhlwm") pod "5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" (UID: "5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083"). InnerVolumeSpecName "kube-api-access-lhlwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.497547 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.576840 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnhsd\" (UniqueName: \"kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd\") pod \"511539e5-fd9d-4376-ae75-4c308f748af9\" (UID: \"511539e5-fd9d-4376-ae75-4c308f748af9\") " Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.577609 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2288\" (UniqueName: \"kubernetes.io/projected/affb28d4-02a0-4da0-9160-14b003815d10-kube-api-access-m2288\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.577622 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhlwm\" (UniqueName: \"kubernetes.io/projected/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083-kube-api-access-lhlwm\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.582216 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd" (OuterVolumeSpecName: "kube-api-access-vnhsd") pod "511539e5-fd9d-4376-ae75-4c308f748af9" (UID: "511539e5-fd9d-4376-ae75-4c308f748af9"). InnerVolumeSpecName "kube-api-access-vnhsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.678689 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnhsd\" (UniqueName: \"kubernetes.io/projected/511539e5-fd9d-4376-ae75-4c308f748af9-kube-api-access-vnhsd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:45 crc kubenswrapper[4779]: I0929 19:26:45.872108 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.034064 4779 generic.go:334] "Generic (PLEG): container finished" podID="92130510-fe09-43ca-af98-8242adf45dc4" containerID="37d18db00a782ab4a30be90646e85b1eb28c893fd2ef59834eab4d14377820df" exitCode=0 Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.034160 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerDied","Data":"37d18db00a782ab4a30be90646e85b1eb28c893fd2ef59834eab4d14377820df"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.048063 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerStarted","Data":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.048482 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.053442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f16-account-create-m6x8h" event={"ID":"511539e5-fd9d-4376-ae75-4c308f748af9","Type":"ContainerDied","Data":"c79449c46300dc1886d8ee81aaa8a3b75693c15e992e745ff86c058aad4389f1"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.053589 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c79449c46300dc1886d8ee81aaa8a3b75693c15e992e745ff86c058aad4389f1" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.053471 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f16-account-create-m6x8h" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.059656 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fed19b9e-ec0b-4944-a98c-885ec4862d48","Type":"ContainerStarted","Data":"a63fa3268152c99a0d4db753d7ea8bfc1957e0bbebfe024887683a7d551841cf"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.071913 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ed29-account-create-p2lzw" event={"ID":"affb28d4-02a0-4da0-9160-14b003815d10","Type":"ContainerDied","Data":"ef3d43fdbf59558899e4d5faaafa0e1f09b57d923e126c6d6e8bbc8bf13c588c"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.071975 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef3d43fdbf59558899e4d5faaafa0e1f09b57d923e126c6d6e8bbc8bf13c588c" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.072068 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ed29-account-create-p2lzw" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.082596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c17f-account-create-pmqwr" event={"ID":"5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083","Type":"ContainerDied","Data":"b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af"} Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.082643 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b98a41ffdcb8a8b17b7e88bce68c999e13601b8b8f3945e8d09331e18b6aa4af" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.082710 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c17f-account-create-pmqwr" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.084340 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.175385137 podStartE2EDuration="7.084303875s" podCreationTimestamp="2025-09-29 19:26:39 +0000 UTC" firstStartedPulling="2025-09-29 19:26:40.903543744 +0000 UTC m=+1111.787968844" lastFinishedPulling="2025-09-29 19:26:44.812462482 +0000 UTC m=+1115.696887582" observedRunningTime="2025-09-29 19:26:46.06579844 +0000 UTC m=+1116.950223540" watchObservedRunningTime="2025-09-29 19:26:46.084303875 +0000 UTC m=+1116.968728965" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.093776 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.0937541729999998 podStartE2EDuration="3.093754173s" podCreationTimestamp="2025-09-29 19:26:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:46.093448444 +0000 UTC m=+1116.977873554" watchObservedRunningTime="2025-09-29 19:26:46.093754173 +0000 UTC m=+1116.978179273" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.115999 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.287832 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.287999 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "etc-machine-id". 
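
The pod_startup_latency_tracker entry for ceilometer-0 above makes the bookkeeping explicit: the startup SLO duration is the end-to-end duration minus time spent pulling images, which is why glance-default-external-api-0, whose entry carries zero-value pull timestamps, reports identical SLO and E2E durations. The arithmetic checks out against the logged values:

# Values copied from the ceilometer-0 tracker entry above (seconds).
pod_start_e2e = 7.084303875            # podStartE2EDuration
first_started_pulling = 40.903543744   # 19:26:40.903543744, seconds past 19:26
last_finished_pulling = 44.812462482   # 19:26:44.812462482

pull_time = last_finished_pulling - first_started_pulling   # ~3.908918738
print(f'{pod_start_e2e - pull_time:.9f}')  # 3.175385137 == podStartSLOduration
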
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.288141 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.288238 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.288270 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6868\" (UniqueName: \"kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.288927 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.288982 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom\") pod \"92130510-fe09-43ca-af98-8242adf45dc4\" (UID: \"92130510-fe09-43ca-af98-8242adf45dc4\") " Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.289387 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92130510-fe09-43ca-af98-8242adf45dc4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.294487 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts" (OuterVolumeSpecName: "scripts") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.294539 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.304191 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868" (OuterVolumeSpecName: "kube-api-access-x6868") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "kube-api-access-x6868". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.352464 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.394623 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.394661 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6868\" (UniqueName: \"kubernetes.io/projected/92130510-fe09-43ca-af98-8242adf45dc4-kube-api-access-x6868\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.394675 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.394685 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.406454 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data" (OuterVolumeSpecName: "config-data") pod "92130510-fe09-43ca-af98-8242adf45dc4" (UID: "92130510-fe09-43ca-af98-8242adf45dc4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:46 crc kubenswrapper[4779]: I0929 19:26:46.496498 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92130510-fe09-43ca-af98-8242adf45dc4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.093028 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"92130510-fe09-43ca-af98-8242adf45dc4","Type":"ContainerDied","Data":"60f04106403839174069b26e72813f96bb3eb3c5af6fd39ba1a19995bd8e42aa"} Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.093106 4779 scope.go:117] "RemoveContainer" containerID="081fd9057c47d5c2cc052e736aefd00f28bb925c0f3ccc98b11a7ad45b66744f" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.094506 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.094638 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="proxy-httpd" containerID="cri-o://aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" gracePeriod=30 Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.094637 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-central-agent" containerID="cri-o://15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" gracePeriod=30 Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.094714 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-notification-agent" containerID="cri-o://de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" gracePeriod=30 Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.094699 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="sg-core" containerID="cri-o://c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" gracePeriod=30 Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.129336 4779 scope.go:117] "RemoveContainer" containerID="37d18db00a782ab4a30be90646e85b1eb28c893fd2ef59834eab4d14377820df" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.134455 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.154119 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.164703 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:47 crc kubenswrapper[4779]: E0929 19:26:47.165071 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511539e5-fd9d-4376-ae75-4c308f748af9" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165084 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="511539e5-fd9d-4376-ae75-4c308f748af9" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: E0929 19:26:47.165094 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="probe" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165099 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="probe" Sep 29 19:26:47 crc kubenswrapper[4779]: E0929 19:26:47.165109 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165115 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: E0929 19:26:47.165135 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="affb28d4-02a0-4da0-9160-14b003815d10" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 
19:26:47.165140 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="affb28d4-02a0-4da0-9160-14b003815d10" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: E0929 19:26:47.165150 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="cinder-scheduler" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165155 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="cinder-scheduler" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165343 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="511539e5-fd9d-4376-ae75-4c308f748af9" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165359 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="cinder-scheduler" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165366 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="affb28d4-02a0-4da0-9160-14b003815d10" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165381 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="92130510-fe09-43ca-af98-8242adf45dc4" containerName="probe" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.165390 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" containerName="mariadb-account-create" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.166271 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.170513 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.183000 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a6f49188-efdd-4f27-ad02-4656f2cf5d11-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314430 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-scripts\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314476 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314518 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314562 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.314594 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctjdg\" (UniqueName: \"kubernetes.io/projected/a6f49188-efdd-4f27-ad02-4656f2cf5d11-kube-api-access-ctjdg\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a6f49188-efdd-4f27-ad02-4656f2cf5d11-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416112 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-scripts\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416210 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416247 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416292 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctjdg\" (UniqueName: \"kubernetes.io/projected/a6f49188-efdd-4f27-ad02-4656f2cf5d11-kube-api-access-ctjdg\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.416218 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a6f49188-efdd-4f27-ad02-4656f2cf5d11-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.421788 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.421974 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.422541 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-scripts\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.424539 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6f49188-efdd-4f27-ad02-4656f2cf5d11-config-data\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.447962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctjdg\" (UniqueName: \"kubernetes.io/projected/a6f49188-efdd-4f27-ad02-4656f2cf5d11-kube-api-access-ctjdg\") pod \"cinder-scheduler-0\" (UID: \"a6f49188-efdd-4f27-ad02-4656f2cf5d11\") " pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.494587 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.645390 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z552g"] Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.646833 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.651571 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z552g"] Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.664344 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.664527 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qn6vq" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.664633 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.731898 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.732029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.732053 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvms6\" (UniqueName: \"kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.732119 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.781153 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92130510-fe09-43ca-af98-8242adf45dc4" path="/var/lib/kubelet/pods/92130510-fe09-43ca-af98-8242adf45dc4/volumes" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.833670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.833754 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.833834 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.833861 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvms6\" (UniqueName: \"kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.842827 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.842921 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.843146 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.859237 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvms6\" (UniqueName: \"kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6\") pod \"nova-cell0-conductor-db-sync-z552g\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.977629 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:26:47 crc kubenswrapper[4779]: I0929 19:26:47.999544 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.074609 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.127936 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" exitCode=0 Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.127965 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" exitCode=2 Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.127974 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" exitCode=0 Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.127983 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" exitCode=0 Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128039 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerDied","Data":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128070 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerDied","Data":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128085 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerDied","Data":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128096 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerDied","Data":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128117 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e620ad7-10e9-4b45-85f4-a4971dc70cd9","Type":"ContainerDied","Data":"17bf5115f8dce94b4499a7453a0d3ea1827948414610c0a8a14715626b031d7d"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128135 4779 scope.go:117] "RemoveContainer" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.128275 4779 util.go:48] "No ready sandbox for pod can be found. 
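
When the four grace-period kills issued for ceilometer-0 land, PLEG reports one "container finished" line per container, and the exit codes separate clean from unclean shutdown: proxy-httpd and both ceilometer agents exit 0 on SIGTERM, while sg-core exits 2, which suggests it does not handle the signal cleanly (a 137 here would typically indicate the 30-second grace period expired and the runtime escalated to SIGKILL). A tallying sketch (Python 3, stdlib; phrasing as in the generic.go lines above):

import collections
import re
import sys

FIN = re.compile(r'container finished" podID="([0-9a-f-]+)" '
                 r'containerID="([0-9a-f]+)" exitCode=(-?\d+)')

by_code = collections.defaultdict(list)
for line in sys.stdin:
    m = FIN.search(line)
    if m:
        _pod, cid, code = m.groups()
        by_code[int(code)].append(cid[:12])

for code, cids in sorted(by_code.items()):
    print(f'exitCode={code}: {len(cids)} container(s): {", ".join(cids)}')
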
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.152220 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a6f49188-efdd-4f27-ad02-4656f2cf5d11","Type":"ContainerStarted","Data":"b5539c03b5d027c4b913df1bc84279968957c2ad34dc4d22cf3e59f3fe55766e"} Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.188654 4779 scope.go:117] "RemoveContainer" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.224758 4779 scope.go:117] "RemoveContainer" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245227 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245358 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245407 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62rh5\" (UniqueName: \"kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245443 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245474 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245498 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.245527 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml\") pod \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\" (UID: \"5e620ad7-10e9-4b45-85f4-a4971dc70cd9\") " Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.246149 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.246640 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.261229 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts" (OuterVolumeSpecName: "scripts") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.264507 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5" (OuterVolumeSpecName: "kube-api-access-62rh5") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "kube-api-access-62rh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.271557 4779 scope.go:117] "RemoveContainer" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.323150 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.332765 4779 scope.go:117] "RemoveContainer" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.333646 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": container with ID starting with aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2 not found: ID does not exist" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.333677 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} err="failed to get container status \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": rpc error: code = NotFound desc = could not find container \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": container with ID starting with aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.333697 4779 scope.go:117] "RemoveContainer" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.334015 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": container with ID starting with c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54 not found: ID does not exist" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334031 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} err="failed to get container status \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": rpc error: code = NotFound desc = could not find container \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": container with ID starting with c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334043 4779 scope.go:117] "RemoveContainer" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.334310 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": container with ID starting with de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11 not found: ID does not exist" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334337 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} err="failed to get container status \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": rpc error: code = NotFound desc = could not 
find container \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": container with ID starting with de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334353 4779 scope.go:117] "RemoveContainer" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.334798 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": container with ID starting with 15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b not found: ID does not exist" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334818 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} err="failed to get container status \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": rpc error: code = NotFound desc = could not find container \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": container with ID starting with 15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.334830 4779 scope.go:117] "RemoveContainer" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.335283 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} err="failed to get container status \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": rpc error: code = NotFound desc = could not find container \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": container with ID starting with aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.335302 4779 scope.go:117] "RemoveContainer" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.335615 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} err="failed to get container status \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": rpc error: code = NotFound desc = could not find container \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": container with ID starting with c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.335637 4779 scope.go:117] "RemoveContainer" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.336573 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} err="failed to get container status \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": rpc error: code = NotFound desc = could not 
find container \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": container with ID starting with de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.336590 4779 scope.go:117] "RemoveContainer" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.336991 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} err="failed to get container status \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": rpc error: code = NotFound desc = could not find container \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": container with ID starting with 15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.337005 4779 scope.go:117] "RemoveContainer" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.337597 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.343902 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} err="failed to get container status \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": rpc error: code = NotFound desc = could not find container \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": container with ID starting with aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.343935 4779 scope.go:117] "RemoveContainer" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.344445 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} err="failed to get container status \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": rpc error: code = NotFound desc = could not find container \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": container with ID starting with c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.344468 4779 scope.go:117] "RemoveContainer" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.345286 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} err="failed to get container status \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": rpc error: code = NotFound desc = could not find container 
\"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": container with ID starting with de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.345304 4779 scope.go:117] "RemoveContainer" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348808 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62rh5\" (UniqueName: \"kubernetes.io/projected/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-kube-api-access-62rh5\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348833 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348844 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348852 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348861 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348869 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348904 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} err="failed to get container status \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": rpc error: code = NotFound desc = could not find container \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": container with ID starting with 15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.348926 4779 scope.go:117] "RemoveContainer" containerID="aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.349402 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2"} err="failed to get container status \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": rpc error: code = NotFound desc = could not find container \"aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2\": container with ID starting with aaa3d5bdd6e011f3cd4039f32a6d31ebf291ac5bdc4e2081177abb7938c146f2 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.349417 4779 scope.go:117] "RemoveContainer" containerID="c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54" Sep 29 19:26:48 crc kubenswrapper[4779]: 
I0929 19:26:48.349577 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54"} err="failed to get container status \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": rpc error: code = NotFound desc = could not find container \"c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54\": container with ID starting with c2ebf37cd7a6c69d145c62cdb82bc2071da1d00bc5ecc8a922e58370fcccda54 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.349589 4779 scope.go:117] "RemoveContainer" containerID="de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.349799 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11"} err="failed to get container status \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": rpc error: code = NotFound desc = could not find container \"de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11\": container with ID starting with de50681d5dea13e2ed88fdf1090e55567903ca4cb1d53ea431d460f72f455c11 not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.349818 4779 scope.go:117] "RemoveContainer" containerID="15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.350180 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b"} err="failed to get container status \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": rpc error: code = NotFound desc = could not find container \"15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b\": container with ID starting with 15255d4af4eb627a03440c17fa7c6914d4719a32f020a9b69b0ba88a922b3c4b not found: ID does not exist" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.389301 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.389357 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.449680 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.458145 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.458659 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data" (OuterVolumeSpecName: "config-data") pod "5e620ad7-10e9-4b45-85f4-a4971dc70cd9" (UID: "5e620ad7-10e9-4b45-85f4-a4971dc70cd9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.531343 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z552g"] Sep 29 19:26:48 crc kubenswrapper[4779]: W0929 19:26:48.535711 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ffdc490_1007_41e8_a410_97a78d400395.slice/crio-884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5 WatchSource:0}: Error finding container 884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5: Status 404 returned error can't find the container with id 884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5 Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.569132 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e620ad7-10e9-4b45-85f4-a4971dc70cd9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.767271 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.779565 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792070 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.792523 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="sg-core" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792547 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="sg-core" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.792565 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-central-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792573 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-central-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.792595 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-notification-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792606 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-notification-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: E0929 19:26:48.792623 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="proxy-httpd" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792630 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="proxy-httpd" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792882 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="proxy-httpd" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792908 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="sg-core" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792923 4779 
memory_manager.go:354] "RemoveStaleState removing state" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-notification-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.792936 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" containerName="ceilometer-central-agent" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.795776 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.801462 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.802706 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.802880 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975290 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975349 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975378 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975551 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:48 crc kubenswrapper[4779]: I0929 19:26:48.975590 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjdtg\" (UniqueName: \"kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg\") pod \"ceilometer-0\" (UID: 
\"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.077803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.077887 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.077939 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjdtg\" (UniqueName: \"kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.077972 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.078001 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.078905 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.078974 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.078996 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.079243 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.082682 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " 
pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.085105 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.087045 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.092697 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.099461 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjdtg\" (UniqueName: \"kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg\") pod \"ceilometer-0\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.168294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z552g" event={"ID":"1ffdc490-1007-41e8-a410-97a78d400395","Type":"ContainerStarted","Data":"884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5"} Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.169631 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a6f49188-efdd-4f27-ad02-4656f2cf5d11","Type":"ContainerStarted","Data":"ec52a6094bc21713c655d753a15df45cfacdbb46b543a97d091ddf8addce96f9"} Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.172001 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.172168 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.191159 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.695921 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:49 crc kubenswrapper[4779]: I0929 19:26:49.804086 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e620ad7-10e9-4b45-85f4-a4971dc70cd9" path="/var/lib/kubelet/pods/5e620ad7-10e9-4b45-85f4-a4971dc70cd9/volumes" Sep 29 19:26:50 crc kubenswrapper[4779]: I0929 19:26:50.181528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a6f49188-efdd-4f27-ad02-4656f2cf5d11","Type":"ContainerStarted","Data":"8ce5d0a49247a46134689d00564ad4a109c8115b4d29aad154a329f5f0b5452f"} Sep 29 19:26:50 crc kubenswrapper[4779]: I0929 19:26:50.183630 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerStarted","Data":"dddba1961d0d54d40b6fbd930b8c3bfeb31ddf20b329b9481f05067af6901a84"} Sep 29 19:26:50 crc kubenswrapper[4779]: I0929 19:26:50.221391 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.22137088 podStartE2EDuration="3.22137088s" podCreationTimestamp="2025-09-29 19:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:26:50.211836349 +0000 UTC m=+1121.096261449" watchObservedRunningTime="2025-09-29 19:26:50.22137088 +0000 UTC m=+1121.105795980" Sep 29 19:26:50 crc kubenswrapper[4779]: I0929 19:26:50.815998 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 19:26:51 crc kubenswrapper[4779]: I0929 19:26:51.199790 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerStarted","Data":"7a9b5ee7ac678f7e1b911af5318b4dc65f9e25ed9e33df16bd511f74fcb297d0"} Sep 29 19:26:51 crc kubenswrapper[4779]: I0929 19:26:51.199823 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:26:51 crc kubenswrapper[4779]: I0929 19:26:51.200152 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 19:26:51 crc kubenswrapper[4779]: I0929 19:26:51.444839 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:51 crc kubenswrapper[4779]: I0929 19:26:51.863867 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 19:26:52 crc kubenswrapper[4779]: I0929 19:26:52.236979 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerStarted","Data":"2c05ce90bedb395f9e2f196356ef7002c07f86cf21ea1ec77715b2283bf8c9b0"} Sep 29 19:26:52 crc kubenswrapper[4779]: I0929 19:26:52.237974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerStarted","Data":"98e4880cb9252b9f706dda96514fb26b566cdf3a8a6bb0f95a72660a5443ba12"} Sep 29 19:26:52 crc kubenswrapper[4779]: I0929 19:26:52.495029 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 
19:26:53.271878 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerStarted","Data":"0c9b3188118be75b06b14b2bcf102383e6ae2dc90d08294e8966c8e35efe9633"} Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.272593 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.307848 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.080969446 podStartE2EDuration="5.307825309s" podCreationTimestamp="2025-09-29 19:26:48 +0000 UTC" firstStartedPulling="2025-09-29 19:26:49.724696293 +0000 UTC m=+1120.609121403" lastFinishedPulling="2025-09-29 19:26:52.951552166 +0000 UTC m=+1123.835977266" observedRunningTime="2025-09-29 19:26:53.292200433 +0000 UTC m=+1124.176625533" watchObservedRunningTime="2025-09-29 19:26:53.307825309 +0000 UTC m=+1124.192250419" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.373652 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.460914 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.460953 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.495501 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:26:53 crc kubenswrapper[4779]: I0929 19:26:53.525528 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 19:26:54 crc kubenswrapper[4779]: I0929 19:26:54.284420 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 19:26:54 crc kubenswrapper[4779]: I0929 19:26:54.284781 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 19:26:55 crc kubenswrapper[4779]: I0929 19:26:55.292263 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-central-agent" containerID="cri-o://7a9b5ee7ac678f7e1b911af5318b4dc65f9e25ed9e33df16bd511f74fcb297d0" gracePeriod=30 Sep 29 19:26:55 crc kubenswrapper[4779]: I0929 19:26:55.292446 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="proxy-httpd" containerID="cri-o://0c9b3188118be75b06b14b2bcf102383e6ae2dc90d08294e8966c8e35efe9633" gracePeriod=30 Sep 29 19:26:55 crc kubenswrapper[4779]: I0929 19:26:55.292500 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="sg-core" containerID="cri-o://2c05ce90bedb395f9e2f196356ef7002c07f86cf21ea1ec77715b2283bf8c9b0" gracePeriod=30 Sep 29 19:26:55 crc kubenswrapper[4779]: I0929 19:26:55.292541 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" 
containerName="ceilometer-notification-agent" containerID="cri-o://98e4880cb9252b9f706dda96514fb26b566cdf3a8a6bb0f95a72660a5443ba12" gracePeriod=30 Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.162601 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.164715 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.312566 4779 generic.go:334] "Generic (PLEG): container finished" podID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerID="0c9b3188118be75b06b14b2bcf102383e6ae2dc90d08294e8966c8e35efe9633" exitCode=0 Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.312609 4779 generic.go:334] "Generic (PLEG): container finished" podID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerID="2c05ce90bedb395f9e2f196356ef7002c07f86cf21ea1ec77715b2283bf8c9b0" exitCode=2 Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.312618 4779 generic.go:334] "Generic (PLEG): container finished" podID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerID="98e4880cb9252b9f706dda96514fb26b566cdf3a8a6bb0f95a72660a5443ba12" exitCode=0 Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.313669 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerDied","Data":"0c9b3188118be75b06b14b2bcf102383e6ae2dc90d08294e8966c8e35efe9633"} Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.313707 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerDied","Data":"2c05ce90bedb395f9e2f196356ef7002c07f86cf21ea1ec77715b2283bf8c9b0"} Sep 29 19:26:56 crc kubenswrapper[4779]: I0929 19:26:56.313722 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerDied","Data":"98e4880cb9252b9f706dda96514fb26b566cdf3a8a6bb0f95a72660a5443ba12"} Sep 29 19:26:57 crc kubenswrapper[4779]: I0929 19:26:57.327711 4779 generic.go:334] "Generic (PLEG): container finished" podID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerID="7a9b5ee7ac678f7e1b911af5318b4dc65f9e25ed9e33df16bd511f74fcb297d0" exitCode=0 Sep 29 19:26:57 crc kubenswrapper[4779]: I0929 19:26:57.327789 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerDied","Data":"7a9b5ee7ac678f7e1b911af5318b4dc65f9e25ed9e33df16bd511f74fcb297d0"} Sep 29 19:26:57 crc kubenswrapper[4779]: I0929 19:26:57.714889 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.582091 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709020 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709078 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjdtg\" (UniqueName: \"kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709094 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709851 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709873 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709893 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.709917 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd\") pod \"e7f029b6-cd0a-4885-8b00-9736f456a25f\" (UID: \"e7f029b6-cd0a-4885-8b00-9736f456a25f\") " Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.710301 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.710342 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.710759 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.710777 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e7f029b6-cd0a-4885-8b00-9736f456a25f-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.715443 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts" (OuterVolumeSpecName: "scripts") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.715471 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg" (OuterVolumeSpecName: "kube-api-access-xjdtg") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "kube-api-access-xjdtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.743101 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.777237 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.812532 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.812720 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.812805 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjdtg\" (UniqueName: \"kubernetes.io/projected/e7f029b6-cd0a-4885-8b00-9736f456a25f-kube-api-access-xjdtg\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.812880 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.812894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data" (OuterVolumeSpecName: "config-data") pod "e7f029b6-cd0a-4885-8b00-9736f456a25f" (UID: "e7f029b6-cd0a-4885-8b00-9736f456a25f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:26:59 crc kubenswrapper[4779]: I0929 19:26:59.914500 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f029b6-cd0a-4885-8b00-9736f456a25f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.365097 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z552g" event={"ID":"1ffdc490-1007-41e8-a410-97a78d400395","Type":"ContainerStarted","Data":"080b50a87bcf31fa21cbd13bfc9fc484d061841d83b28176dad17028cfa30326"} Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.369382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e7f029b6-cd0a-4885-8b00-9736f456a25f","Type":"ContainerDied","Data":"dddba1961d0d54d40b6fbd930b8c3bfeb31ddf20b329b9481f05067af6901a84"} Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.369446 4779 scope.go:117] "RemoveContainer" containerID="0c9b3188118be75b06b14b2bcf102383e6ae2dc90d08294e8966c8e35efe9633" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.369451 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.395188 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-z552g" podStartSLOduration=2.571891643 podStartE2EDuration="13.395164247s" podCreationTimestamp="2025-09-29 19:26:47 +0000 UTC" firstStartedPulling="2025-09-29 19:26:48.541497261 +0000 UTC m=+1119.425922361" lastFinishedPulling="2025-09-29 19:26:59.364769835 +0000 UTC m=+1130.249194965" observedRunningTime="2025-09-29 19:27:00.389380349 +0000 UTC m=+1131.273805469" watchObservedRunningTime="2025-09-29 19:27:00.395164247 +0000 UTC m=+1131.279589357" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.395796 4779 scope.go:117] "RemoveContainer" containerID="2c05ce90bedb395f9e2f196356ef7002c07f86cf21ea1ec77715b2283bf8c9b0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.421809 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.438872 4779 scope.go:117] "RemoveContainer" containerID="98e4880cb9252b9f706dda96514fb26b566cdf3a8a6bb0f95a72660a5443ba12" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.443336 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.457931 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:00 crc kubenswrapper[4779]: E0929 19:27:00.458288 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-notification-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458304 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-notification-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: E0929 19:27:00.458334 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-central-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458341 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-central-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: E0929 19:27:00.458356 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="sg-core" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458363 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="sg-core" Sep 29 19:27:00 crc kubenswrapper[4779]: E0929 19:27:00.458378 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="proxy-httpd" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458384 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="proxy-httpd" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458544 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-central-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458555 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="proxy-httpd" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 
19:27:00.458575 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="ceilometer-notification-agent" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.458589 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" containerName="sg-core" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.460102 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.465751 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.470816 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.472857 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.476843 4779 scope.go:117] "RemoveContainer" containerID="7a9b5ee7ac678f7e1b911af5318b4dc65f9e25ed9e33df16bd511f74fcb297d0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.525771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.525824 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.525922 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvzl4\" (UniqueName: \"kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.525984 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.526021 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.526055 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.526117 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627526 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627601 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627680 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvzl4\" (UniqueName: \"kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627702 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627720 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.627748 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.628151 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.628665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.631759 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.632209 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.632653 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.646061 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.647556 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvzl4\" (UniqueName: \"kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4\") pod \"ceilometer-0\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " pod="openstack/ceilometer-0" Sep 29 19:27:00 crc kubenswrapper[4779]: I0929 19:27:00.795425 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:27:01 crc kubenswrapper[4779]: I0929 19:27:01.271196 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:01 crc kubenswrapper[4779]: W0929 19:27:01.284468 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5cb4dde_7de2_43a6_aeeb_92350ca1454d.slice/crio-4209017d2fbbbf63c3ea13fb158acc61da6e482290360c442860b2d15a6327bc WatchSource:0}: Error finding container 4209017d2fbbbf63c3ea13fb158acc61da6e482290360c442860b2d15a6327bc: Status 404 returned error can't find the container with id 4209017d2fbbbf63c3ea13fb158acc61da6e482290360c442860b2d15a6327bc Sep 29 19:27:01 crc kubenswrapper[4779]: I0929 19:27:01.384777 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerStarted","Data":"4209017d2fbbbf63c3ea13fb158acc61da6e482290360c442860b2d15a6327bc"} Sep 29 19:27:01 crc kubenswrapper[4779]: I0929 19:27:01.780453 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7f029b6-cd0a-4885-8b00-9736f456a25f" path="/var/lib/kubelet/pods/e7f029b6-cd0a-4885-8b00-9736f456a25f/volumes" Sep 29 19:27:02 crc kubenswrapper[4779]: I0929 19:27:02.399415 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerStarted","Data":"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"} Sep 29 19:27:05 crc kubenswrapper[4779]: I0929 19:27:05.429243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerStarted","Data":"c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd"} Sep 29 19:27:05 crc kubenswrapper[4779]: I0929 19:27:05.437146 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:06 crc kubenswrapper[4779]: I0929 19:27:06.449506 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerStarted","Data":"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1"} Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.479676 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerStarted","Data":"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa"} Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.480000 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="proxy-httpd" containerID="cri-o://50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa" gracePeriod=30 Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.480081 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.480063 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-central-agent" containerID="cri-o://40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab" gracePeriod=30 Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.480007 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-notification-agent" containerID="cri-o://c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd" gracePeriod=30 Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.480044 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="sg-core" containerID="cri-o://a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1" gracePeriod=30 Sep 29 19:27:07 crc kubenswrapper[4779]: I0929 19:27:07.523070 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.675033408 podStartE2EDuration="7.523026071s" podCreationTimestamp="2025-09-29 19:27:00 +0000 UTC" firstStartedPulling="2025-09-29 19:27:01.288707695 +0000 UTC m=+1132.173132795" lastFinishedPulling="2025-09-29 19:27:07.136700318 +0000 UTC m=+1138.021125458" observedRunningTime="2025-09-29 19:27:07.511778724 +0000 UTC m=+1138.396203824" watchObservedRunningTime="2025-09-29 19:27:07.523026071 +0000 UTC m=+1138.407451191" Sep 29 19:27:08 crc kubenswrapper[4779]: I0929 19:27:08.490100 4779 generic.go:334] "Generic (PLEG): container finished" podID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerID="a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1" exitCode=2 Sep 29 19:27:08 crc kubenswrapper[4779]: I0929 19:27:08.490357 4779 generic.go:334] "Generic (PLEG): container finished" podID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" 
containerID="c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd" exitCode=0 Sep 29 19:27:08 crc kubenswrapper[4779]: I0929 19:27:08.490157 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerDied","Data":"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1"} Sep 29 19:27:08 crc kubenswrapper[4779]: I0929 19:27:08.490385 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerDied","Data":"c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd"} Sep 29 19:27:10 crc kubenswrapper[4779]: I0929 19:27:10.514454 4779 generic.go:334] "Generic (PLEG): container finished" podID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerID="40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab" exitCode=0 Sep 29 19:27:10 crc kubenswrapper[4779]: I0929 19:27:10.514513 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerDied","Data":"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"} Sep 29 19:27:11 crc kubenswrapper[4779]: I0929 19:27:11.526891 4779 generic.go:334] "Generic (PLEG): container finished" podID="1ffdc490-1007-41e8-a410-97a78d400395" containerID="080b50a87bcf31fa21cbd13bfc9fc484d061841d83b28176dad17028cfa30326" exitCode=0 Sep 29 19:27:11 crc kubenswrapper[4779]: I0929 19:27:11.526941 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z552g" event={"ID":"1ffdc490-1007-41e8-a410-97a78d400395","Type":"ContainerDied","Data":"080b50a87bcf31fa21cbd13bfc9fc484d061841d83b28176dad17028cfa30326"} Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.063110 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.195124 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle\") pod \"1ffdc490-1007-41e8-a410-97a78d400395\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.195191 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data\") pod \"1ffdc490-1007-41e8-a410-97a78d400395\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.195257 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvms6\" (UniqueName: \"kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6\") pod \"1ffdc490-1007-41e8-a410-97a78d400395\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.195395 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts\") pod \"1ffdc490-1007-41e8-a410-97a78d400395\" (UID: \"1ffdc490-1007-41e8-a410-97a78d400395\") " Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.202435 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts" (OuterVolumeSpecName: "scripts") pod "1ffdc490-1007-41e8-a410-97a78d400395" (UID: "1ffdc490-1007-41e8-a410-97a78d400395"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.210731 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6" (OuterVolumeSpecName: "kube-api-access-kvms6") pod "1ffdc490-1007-41e8-a410-97a78d400395" (UID: "1ffdc490-1007-41e8-a410-97a78d400395"). InnerVolumeSpecName "kube-api-access-kvms6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.234655 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data" (OuterVolumeSpecName: "config-data") pod "1ffdc490-1007-41e8-a410-97a78d400395" (UID: "1ffdc490-1007-41e8-a410-97a78d400395"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.237856 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ffdc490-1007-41e8-a410-97a78d400395" (UID: "1ffdc490-1007-41e8-a410-97a78d400395"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.300284 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.300373 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.300393 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvms6\" (UniqueName: \"kubernetes.io/projected/1ffdc490-1007-41e8-a410-97a78d400395-kube-api-access-kvms6\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.300410 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ffdc490-1007-41e8-a410-97a78d400395-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.561620 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z552g" event={"ID":"1ffdc490-1007-41e8-a410-97a78d400395","Type":"ContainerDied","Data":"884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5"} Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.561680 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.561690 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z552g" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.652584 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 19:27:13 crc kubenswrapper[4779]: E0929 19:27:13.653004 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffdc490-1007-41e8-a410-97a78d400395" containerName="nova-cell0-conductor-db-sync" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.653026 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffdc490-1007-41e8-a410-97a78d400395" containerName="nova-cell0-conductor-db-sync" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.653252 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ffdc490-1007-41e8-a410-97a78d400395" containerName="nova-cell0-conductor-db-sync" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.653878 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.657775 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.659556 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qn6vq" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.669041 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.811591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.811830 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.811890 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk5rd\" (UniqueName: \"kubernetes.io/projected/7b53aa08-b1e7-4e69-86f3-830bb5c84002-kube-api-access-rk5rd\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.915959 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.916165 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.916422 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk5rd\" (UniqueName: \"kubernetes.io/projected/7b53aa08-b1e7-4e69-86f3-830bb5c84002-kube-api-access-rk5rd\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.925736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.926016 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b53aa08-b1e7-4e69-86f3-830bb5c84002-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.956458 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk5rd\" (UniqueName: \"kubernetes.io/projected/7b53aa08-b1e7-4e69-86f3-830bb5c84002-kube-api-access-rk5rd\") pod \"nova-cell0-conductor-0\" (UID: \"7b53aa08-b1e7-4e69-86f3-830bb5c84002\") " pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:13 crc kubenswrapper[4779]: I0929 19:27:13.984419 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:14 crc kubenswrapper[4779]: I0929 19:27:14.301246 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 19:27:14 crc kubenswrapper[4779]: I0929 19:27:14.574692 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7b53aa08-b1e7-4e69-86f3-830bb5c84002","Type":"ContainerStarted","Data":"bcee975134bc90c6c25542cb1227dc8da52a25f9b14493559205be5d9548eaa8"} Sep 29 19:27:14 crc kubenswrapper[4779]: I0929 19:27:14.574741 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7b53aa08-b1e7-4e69-86f3-830bb5c84002","Type":"ContainerStarted","Data":"c167861a7117eedb639fdcbe73614b8c78d065b2e426f00c11755a3ab2989fec"} Sep 29 19:27:14 crc kubenswrapper[4779]: I0929 19:27:14.574860 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:14 crc kubenswrapper[4779]: I0929 19:27:14.592101 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.59207661 podStartE2EDuration="1.59207661s" podCreationTimestamp="2025-09-29 19:27:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:14.591259918 +0000 UTC m=+1145.475685028" watchObservedRunningTime="2025-09-29 19:27:14.59207661 +0000 UTC m=+1145.476501760" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.037221 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.547264 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-9jlbk"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.548683 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.552635 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.552785 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.565629 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9jlbk"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.645534 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bck58\" (UniqueName: \"kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.645854 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.646001 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.646303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.748278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.748373 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bck58\" (UniqueName: \"kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.748436 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.748459 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.755697 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.755871 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.773193 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bck58\" (UniqueName: \"kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.781960 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data\") pod \"nova-cell0-cell-mapping-9jlbk\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.806179 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.807795 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.813795 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.815552 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.832597 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.833725 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.835654 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.853853 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.855097 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.860005 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.887460 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.894829 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.913666 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.951790 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952144 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952186 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952251 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952343 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skb58\" (UniqueName: \"kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952390 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr2zr\" (UniqueName: \"kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952455 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952478 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvsqf\" (UniqueName: \"kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.952699 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.954711 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.964641 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 19:27:24 crc kubenswrapper[4779]: I0929 19:27:24.978784 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055300 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skb58\" (UniqueName: \"kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055360 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr2zr\" (UniqueName: \"kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055453 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc 
kubenswrapper[4779]: I0929 19:27:25.055472 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvsqf\" (UniqueName: \"kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055698 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055786 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055806 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055854 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055877 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055885 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.055996 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp8w8\" (UniqueName: \"kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.056052 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.056097 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data\") pod \"nova-api-0\" (UID: 
\"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.063682 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.064614 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.066137 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.088032 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.094889 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.099001 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.115539 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.117812 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.119062 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvsqf\" (UniqueName: \"kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf\") pod \"nova-api-0\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.121794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skb58\" (UniqueName: \"kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58\") pod \"nova-cell1-novncproxy-0\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.130512 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr2zr\" (UniqueName: \"kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr\") pod \"nova-scheduler-0\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.147933 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.157704 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.157857 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp8w8\" (UniqueName: \"kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.157883 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.157937 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.159288 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.165221 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.167798 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.169258 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.206539 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.219524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp8w8\" (UniqueName: \"kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8\") pod \"nova-metadata-0\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.219901 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.278699 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.278803 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.278838 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvwpl\" (UniqueName: \"kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.278870 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.278927 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: 
I0929 19:27:25.278955 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.319401 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384452 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384517 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvwpl\" (UniqueName: \"kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384544 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384586 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384613 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.384642 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.385551 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.388152 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: 
\"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.390498 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.390505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.392864 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.408454 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvwpl\" (UniqueName: \"kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl\") pod \"dnsmasq-dns-845d6d6f59-5q9p4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.465122 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.575829 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9jlbk"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.728151 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9jlbk" event={"ID":"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39","Type":"ContainerStarted","Data":"fcdfad0c61b61745b863c383cd7feb5ac71df46ad3424d573923cfb824d8ba01"} Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.788216 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dbzcc"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.789346 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.791077 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dbzcc"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.793836 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.794701 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.832676 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.892508 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5hgr\" (UniqueName: \"kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.892667 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.892694 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.893870 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.916474 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.977426 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.999163 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5hgr\" (UniqueName: \"kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.999366 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc 
kubenswrapper[4779]: I0929 19:27:25.999415 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:25 crc kubenswrapper[4779]: I0929 19:27:25.999454 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.008074 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.009257 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.015006 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.021980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5hgr\" (UniqueName: \"kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr\") pod \"nova-cell1-conductor-db-sync-dbzcc\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.093654 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.099944 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.189921 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.625426 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dbzcc"] Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.737901 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerStarted","Data":"1ae843fb89e96ef407e9da22be3ec3d1f475d1231893de904389b8a38491a35a"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.739303 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" event={"ID":"4e3a6b3d-7933-45c0-934c-00846c0783d7","Type":"ContainerStarted","Data":"4518b56d9e1a05242b45793fd6cecc292300d5e170628e95ecc58ca8afa086a6"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.740963 4779 generic.go:334] "Generic (PLEG): container finished" podID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerID="acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35" exitCode=0 Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.741174 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" event={"ID":"bac34d02-e097-4b82-afda-9b2d885c6fa4","Type":"ContainerDied","Data":"acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.741229 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" event={"ID":"bac34d02-e097-4b82-afda-9b2d885c6fa4","Type":"ContainerStarted","Data":"c2da79f17a717c0bd4b240fed4a4d7e090f1aee670d813eb54d0e3b66d632f8c"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.743460 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bd57dbe5-edf2-4209-aec4-255ef19013ff","Type":"ContainerStarted","Data":"d317abe7e8c3d4afc8f8111156a2b0653c7497997a38023159b98f0bae634a4c"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.745499 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerStarted","Data":"c3f429a0fd792ddcf9f9953900bd9eca404a5430215c31b8cfec58982901ee8d"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.748098 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9jlbk" event={"ID":"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39","Type":"ContainerStarted","Data":"6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.753002 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da54bd62-a0d1-438e-b7c2-dae26d204540","Type":"ContainerStarted","Data":"5eb4c2937f56807a68144dc34e69402cbb0b7cda5dadf24e6ddfcda95cf046e3"} Sep 29 19:27:26 crc kubenswrapper[4779]: I0929 19:27:26.802361 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-9jlbk" podStartSLOduration=2.802301088 podStartE2EDuration="2.802301088s" podCreationTimestamp="2025-09-29 19:27:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:26.794185456 +0000 UTC m=+1157.678610556" watchObservedRunningTime="2025-09-29 19:27:26.802301088 +0000 UTC m=+1157.686726188" Sep 
29 19:27:27 crc kubenswrapper[4779]: I0929 19:27:27.779391 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:27 crc kubenswrapper[4779]: I0929 19:27:27.779690 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" event={"ID":"4e3a6b3d-7933-45c0-934c-00846c0783d7","Type":"ContainerStarted","Data":"926b73d08bda76eac4969ce958a86ccd0d9bc085a58523b570bb045b65784955"} Sep 29 19:27:27 crc kubenswrapper[4779]: I0929 19:27:27.779709 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" event={"ID":"bac34d02-e097-4b82-afda-9b2d885c6fa4","Type":"ContainerStarted","Data":"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23"} Sep 29 19:27:27 crc kubenswrapper[4779]: I0929 19:27:27.789235 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" podStartSLOduration=2.7892227050000002 podStartE2EDuration="2.789222705s" podCreationTimestamp="2025-09-29 19:27:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:27.783502859 +0000 UTC m=+1158.667927989" watchObservedRunningTime="2025-09-29 19:27:27.789222705 +0000 UTC m=+1158.673647805" Sep 29 19:27:27 crc kubenswrapper[4779]: I0929 19:27:27.811146 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" podStartSLOduration=2.811128323 podStartE2EDuration="2.811128323s" podCreationTimestamp="2025-09-29 19:27:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:27.803410542 +0000 UTC m=+1158.687835642" watchObservedRunningTime="2025-09-29 19:27:27.811128323 +0000 UTC m=+1158.695553423" Sep 29 19:27:28 crc kubenswrapper[4779]: I0929 19:27:28.925020 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.003150 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.809148 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bd57dbe5-edf2-4209-aec4-255ef19013ff","Type":"ContainerStarted","Data":"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.822624 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerStarted","Data":"1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.822701 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerStarted","Data":"a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.822980 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-metadata" containerID="cri-o://1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f" gracePeriod=30 Sep 29 
19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.823137 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-log" containerID="cri-o://a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3" gracePeriod=30 Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.825455 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da54bd62-a0d1-438e-b7c2-dae26d204540","Type":"ContainerStarted","Data":"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.830805 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerStarted","Data":"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.830877 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerStarted","Data":"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d"} Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.901654 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.211355491 podStartE2EDuration="5.901635739s" podCreationTimestamp="2025-09-29 19:27:24 +0000 UTC" firstStartedPulling="2025-09-29 19:27:25.853501462 +0000 UTC m=+1156.737926562" lastFinishedPulling="2025-09-29 19:27:28.54378171 +0000 UTC m=+1159.428206810" observedRunningTime="2025-09-29 19:27:29.895198134 +0000 UTC m=+1160.779623254" watchObservedRunningTime="2025-09-29 19:27:29.901635739 +0000 UTC m=+1160.786060849" Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.917409 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.4755832030000002 podStartE2EDuration="5.917389939s" podCreationTimestamp="2025-09-29 19:27:24 +0000 UTC" firstStartedPulling="2025-09-29 19:27:26.108020559 +0000 UTC m=+1156.992445659" lastFinishedPulling="2025-09-29 19:27:28.549827275 +0000 UTC m=+1159.434252395" observedRunningTime="2025-09-29 19:27:29.913648867 +0000 UTC m=+1160.798073977" watchObservedRunningTime="2025-09-29 19:27:29.917389939 +0000 UTC m=+1160.801815049" Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.941879 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.311226098 podStartE2EDuration="5.941850007s" podCreationTimestamp="2025-09-29 19:27:24 +0000 UTC" firstStartedPulling="2025-09-29 19:27:25.917838278 +0000 UTC m=+1156.802263378" lastFinishedPulling="2025-09-29 19:27:28.548462187 +0000 UTC m=+1159.432887287" observedRunningTime="2025-09-29 19:27:29.934186508 +0000 UTC m=+1160.818611648" watchObservedRunningTime="2025-09-29 19:27:29.941850007 +0000 UTC m=+1160.826275107" Sep 29 19:27:29 crc kubenswrapper[4779]: I0929 19:27:29.969724 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.400818743 podStartE2EDuration="5.969703147s" podCreationTimestamp="2025-09-29 19:27:24 +0000 UTC" firstStartedPulling="2025-09-29 19:27:25.991846328 +0000 UTC m=+1156.876271428" lastFinishedPulling="2025-09-29 19:27:28.560730712 +0000 UTC m=+1159.445155832" 
observedRunningTime="2025-09-29 19:27:29.959135859 +0000 UTC m=+1160.843560959" watchObservedRunningTime="2025-09-29 19:27:29.969703147 +0000 UTC m=+1160.854128257" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.214798 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.221030 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.320156 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.320418 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.805940 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843134 4779 generic.go:334] "Generic (PLEG): container finished" podID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerID="1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f" exitCode=0 Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843168 4779 generic.go:334] "Generic (PLEG): container finished" podID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerID="a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3" exitCode=143 Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843362 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="bd57dbe5-edf2-4209-aec4-255ef19013ff" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32" gracePeriod=30 Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerDied","Data":"1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f"} Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843628 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerDied","Data":"a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3"} Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843639 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c","Type":"ContainerDied","Data":"c3f429a0fd792ddcf9f9953900bd9eca404a5430215c31b8cfec58982901ee8d"} Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.843650 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3f429a0fd792ddcf9f9953900bd9eca404a5430215c31b8cfec58982901ee8d" Sep 29 19:27:30 crc kubenswrapper[4779]: I0929 19:27:30.889631 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.018359 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data\") pod \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.018480 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs\") pod \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.018499 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle\") pod \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.018685 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp8w8\" (UniqueName: \"kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8\") pod \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\" (UID: \"a8357f7c-f656-4c05-b8ab-cc93c82a1b7c\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.018874 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs" (OuterVolumeSpecName: "logs") pod "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" (UID: "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.019046 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.024538 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8" (OuterVolumeSpecName: "kube-api-access-jp8w8") pod "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" (UID: "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c"). InnerVolumeSpecName "kube-api-access-jp8w8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.046934 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" (UID: "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.055163 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data" (OuterVolumeSpecName: "config-data") pod "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" (UID: "a8357f7c-f656-4c05-b8ab-cc93c82a1b7c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.120543 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.120587 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp8w8\" (UniqueName: \"kubernetes.io/projected/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-kube-api-access-jp8w8\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.120601 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.550290 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.629998 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skb58\" (UniqueName: \"kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58\") pod \"bd57dbe5-edf2-4209-aec4-255ef19013ff\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.630090 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data\") pod \"bd57dbe5-edf2-4209-aec4-255ef19013ff\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.630240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle\") pod \"bd57dbe5-edf2-4209-aec4-255ef19013ff\" (UID: \"bd57dbe5-edf2-4209-aec4-255ef19013ff\") " Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.634450 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58" (OuterVolumeSpecName: "kube-api-access-skb58") pod "bd57dbe5-edf2-4209-aec4-255ef19013ff" (UID: "bd57dbe5-edf2-4209-aec4-255ef19013ff"). InnerVolumeSpecName "kube-api-access-skb58". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.661826 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data" (OuterVolumeSpecName: "config-data") pod "bd57dbe5-edf2-4209-aec4-255ef19013ff" (UID: "bd57dbe5-edf2-4209-aec4-255ef19013ff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.680536 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd57dbe5-edf2-4209-aec4-255ef19013ff" (UID: "bd57dbe5-edf2-4209-aec4-255ef19013ff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.732049 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.732087 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skb58\" (UniqueName: \"kubernetes.io/projected/bd57dbe5-edf2-4209-aec4-255ef19013ff-kube-api-access-skb58\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.732101 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd57dbe5-edf2-4209-aec4-255ef19013ff-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874512 4779 generic.go:334] "Generic (PLEG): container finished" podID="bd57dbe5-edf2-4209-aec4-255ef19013ff" containerID="b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32" exitCode=0 Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874534 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bd57dbe5-edf2-4209-aec4-255ef19013ff","Type":"ContainerDied","Data":"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32"} Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874585 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874631 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bd57dbe5-edf2-4209-aec4-255ef19013ff","Type":"ContainerDied","Data":"d317abe7e8c3d4afc8f8111156a2b0653c7497997a38023159b98f0bae634a4c"} Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874617 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.874704 4779 scope.go:117] "RemoveContainer" containerID="b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.915950 4779 scope.go:117] "RemoveContainer" containerID="b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.923702 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:31 crc kubenswrapper[4779]: E0929 19:27:31.926777 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32\": container with ID starting with b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32 not found: ID does not exist" containerID="b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.926816 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32"} err="failed to get container status \"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32\": rpc error: code = NotFound desc = could not find container \"b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32\": container with ID starting with b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32 not found: ID does not exist" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.935470 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.964624 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.978590 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.986286 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:31 crc kubenswrapper[4779]: E0929 19:27:31.986820 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-log" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.986834 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-log" Sep 29 19:27:31 crc kubenswrapper[4779]: E0929 19:27:31.986874 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-metadata" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.986881 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-metadata" Sep 29 19:27:31 crc kubenswrapper[4779]: E0929 19:27:31.986891 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd57dbe5-edf2-4209-aec4-255ef19013ff" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.986897 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd57dbe5-edf2-4209-aec4-255ef19013ff" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.987197 
4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd57dbe5-edf2-4209-aec4-255ef19013ff" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.987309 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-metadata" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.987359 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" containerName="nova-metadata-log" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.988593 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.991794 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.991890 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.991847 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 19:27:31 crc kubenswrapper[4779]: I0929 19:27:31.997261 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.006489 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.007964 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.010237 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.010667 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.017805 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.140423 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.140724 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.140778 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.140963 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141069 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141174 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141232 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmqq7\" (UniqueName: \"kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141362 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141534 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.141578 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbgb9\" (UniqueName: \"kubernetes.io/projected/d6e31019-bd61-45e8-9380-b973dcbe4873-kube-api-access-pbgb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243286 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbgb9\" (UniqueName: \"kubernetes.io/projected/d6e31019-bd61-45e8-9380-b973dcbe4873-kube-api-access-pbgb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243661 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243783 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243821 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.243993 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.244364 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.244429 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmqq7\" (UniqueName: \"kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.244473 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.244527 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.244561 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.250690 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.253197 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.259539 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.262573 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.266872 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.269048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e31019-bd61-45e8-9380-b973dcbe4873-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.269250 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmqq7\" (UniqueName: \"kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.271206 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data\") pod \"nova-metadata-0\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.275560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbgb9\" (UniqueName: \"kubernetes.io/projected/d6e31019-bd61-45e8-9380-b973dcbe4873-kube-api-access-pbgb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6e31019-bd61-45e8-9380-b973dcbe4873\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.314011 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.331226 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.781920 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.844245 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.886547 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerStarted","Data":"590ca9dc4747fab9297cb348341ea0cbc9f4cfac7deac52b4004ca5eb8b8806c"} Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.889039 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6e31019-bd61-45e8-9380-b973dcbe4873","Type":"ContainerStarted","Data":"dcf05bbb9c1da581b040cab3ffec030495835e1494a2d13e640e0b0c092582bb"} Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.896371 4779 generic.go:334] "Generic (PLEG): container finished" podID="bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" containerID="6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc" exitCode=0 Sep 29 19:27:32 crc kubenswrapper[4779]: I0929 19:27:32.896413 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9jlbk" event={"ID":"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39","Type":"ContainerDied","Data":"6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc"} Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.782864 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8357f7c-f656-4c05-b8ab-cc93c82a1b7c" path="/var/lib/kubelet/pods/a8357f7c-f656-4c05-b8ab-cc93c82a1b7c/volumes" Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.784398 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd57dbe5-edf2-4209-aec4-255ef19013ff" path="/var/lib/kubelet/pods/bd57dbe5-edf2-4209-aec4-255ef19013ff/volumes" Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.914264 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerStarted","Data":"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9"} Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.914609 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerStarted","Data":"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503"} Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.919057 4779 generic.go:334] "Generic (PLEG): container finished" podID="4e3a6b3d-7933-45c0-934c-00846c0783d7" containerID="926b73d08bda76eac4969ce958a86ccd0d9bc085a58523b570bb045b65784955" exitCode=0 Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.919131 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" event={"ID":"4e3a6b3d-7933-45c0-934c-00846c0783d7","Type":"ContainerDied","Data":"926b73d08bda76eac4969ce958a86ccd0d9bc085a58523b570bb045b65784955"} Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.921419 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6e31019-bd61-45e8-9380-b973dcbe4873","Type":"ContainerStarted","Data":"d3158ec39b2a4f6407c6ea69ae8c55337aa54fa952e8cef3ba8195674f72d5e9"} Sep 29 19:27:33 
crc kubenswrapper[4779]: I0929 19:27:33.977661 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.977633007 podStartE2EDuration="2.977633007s" podCreationTimestamp="2025-09-29 19:27:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:33.943139126 +0000 UTC m=+1164.827564236" watchObservedRunningTime="2025-09-29 19:27:33.977633007 +0000 UTC m=+1164.862058107" Sep 29 19:27:33 crc kubenswrapper[4779]: I0929 19:27:33.988483 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.988465743 podStartE2EDuration="2.988465743s" podCreationTimestamp="2025-09-29 19:27:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:33.987343182 +0000 UTC m=+1164.871768322" watchObservedRunningTime="2025-09-29 19:27:33.988465743 +0000 UTC m=+1164.872890843" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.349981 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.500906 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts\") pod \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.500982 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle\") pod \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.501123 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data\") pod \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.501165 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bck58\" (UniqueName: \"kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58\") pod \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\" (UID: \"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39\") " Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.519915 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58" (OuterVolumeSpecName: "kube-api-access-bck58") pod "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" (UID: "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39"). InnerVolumeSpecName "kube-api-access-bck58". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.519913 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts" (OuterVolumeSpecName: "scripts") pod "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" (UID: "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.529216 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" (UID: "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.549073 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data" (OuterVolumeSpecName: "config-data") pod "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" (UID: "bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.603212 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.603255 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.603266 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.603277 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bck58\" (UniqueName: \"kubernetes.io/projected/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39-kube-api-access-bck58\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.938263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9jlbk" event={"ID":"bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39","Type":"ContainerDied","Data":"fcdfad0c61b61745b863c383cd7feb5ac71df46ad3424d573923cfb824d8ba01"} Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.938330 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcdfad0c61b61745b863c383cd7feb5ac71df46ad3424d573923cfb824d8ba01" Sep 29 19:27:34 crc kubenswrapper[4779]: I0929 19:27:34.938419 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9jlbk" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.104377 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.104592 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-log" containerID="cri-o://97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" gracePeriod=30 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.104686 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-api" containerID="cri-o://99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" gracePeriod=30 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.130074 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.131648 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="da54bd62-a0d1-438e-b7c2-dae26d204540" containerName="nova-scheduler-scheduler" containerID="cri-o://ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75" gracePeriod=30 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.144957 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.420652 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.468532 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.520573 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts\") pod \"4e3a6b3d-7933-45c0-934c-00846c0783d7\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.520717 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data\") pod \"4e3a6b3d-7933-45c0-934c-00846c0783d7\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.520747 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5hgr\" (UniqueName: \"kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr\") pod \"4e3a6b3d-7933-45c0-934c-00846c0783d7\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.520794 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle\") pod \"4e3a6b3d-7933-45c0-934c-00846c0783d7\" (UID: \"4e3a6b3d-7933-45c0-934c-00846c0783d7\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.525895 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts" (OuterVolumeSpecName: "scripts") pod "4e3a6b3d-7933-45c0-934c-00846c0783d7" (UID: "4e3a6b3d-7933-45c0-934c-00846c0783d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.535602 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr" (OuterVolumeSpecName: "kube-api-access-d5hgr") pod "4e3a6b3d-7933-45c0-934c-00846c0783d7" (UID: "4e3a6b3d-7933-45c0-934c-00846c0783d7"). InnerVolumeSpecName "kube-api-access-d5hgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.555814 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e3a6b3d-7933-45c0-934c-00846c0783d7" (UID: "4e3a6b3d-7933-45c0-934c-00846c0783d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.566503 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.566766 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="dnsmasq-dns" containerID="cri-o://1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732" gracePeriod=10 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.577374 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data" (OuterVolumeSpecName: "config-data") pod "4e3a6b3d-7933-45c0-934c-00846c0783d7" (UID: "4e3a6b3d-7933-45c0-934c-00846c0783d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.624496 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.624528 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.624543 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5hgr\" (UniqueName: \"kubernetes.io/projected/4e3a6b3d-7933-45c0-934c-00846c0783d7-kube-api-access-d5hgr\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.624576 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3a6b3d-7933-45c0-934c-00846c0783d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.710108 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.827600 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle\") pod \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.827670 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data\") pod \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.827875 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs\") pod \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.827931 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvsqf\" (UniqueName: \"kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf\") pod \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\" (UID: \"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88\") " Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.828519 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs" (OuterVolumeSpecName: "logs") pod "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" (UID: "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.832067 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf" (OuterVolumeSpecName: "kube-api-access-jvsqf") pod "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" (UID: "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88"). InnerVolumeSpecName "kube-api-access-jvsqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.856875 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" (UID: "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.857066 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data" (OuterVolumeSpecName: "config-data") pod "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" (UID: "d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.930125 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.930153 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.930163 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvsqf\" (UniqueName: \"kubernetes.io/projected/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-kube-api-access-jvsqf\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.930173 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.950922 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.950940 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dbzcc" event={"ID":"4e3a6b3d-7933-45c0-934c-00846c0783d7","Type":"ContainerDied","Data":"4518b56d9e1a05242b45793fd6cecc292300d5e170628e95ecc58ca8afa086a6"} Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.951099 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4518b56d9e1a05242b45793fd6cecc292300d5e170628e95ecc58ca8afa086a6" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.956788 4779 generic.go:334] "Generic (PLEG): container finished" podID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerID="1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732" exitCode=0 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.956852 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" event={"ID":"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703","Type":"ContainerDied","Data":"1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732"} Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.961775 4779 generic.go:334] "Generic (PLEG): container finished" podID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerID="99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" exitCode=0 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.961810 4779 generic.go:334] "Generic (PLEG): container finished" podID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerID="97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" exitCode=143 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.962044 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-log" containerID="cri-o://1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" gracePeriod=30 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.962440 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.963422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerDied","Data":"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61"} Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.963511 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerDied","Data":"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d"} Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.963552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88","Type":"ContainerDied","Data":"1ae843fb89e96ef407e9da22be3ec3d1f475d1231893de904389b8a38491a35a"} Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.963571 4779 scope.go:117] "RemoveContainer" containerID="99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.964292 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-metadata" containerID="cri-o://631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" gracePeriod=30 Sep 29 19:27:35 crc kubenswrapper[4779]: I0929 19:27:35.998598 4779 scope.go:117] "RemoveContainer" containerID="97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.021578 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.032680 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.057500 4779 scope.go:117] "RemoveContainer" containerID="99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.062437 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.070911 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071276 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="init" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071293 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="init" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071308 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3a6b3d-7933-45c0-934c-00846c0783d7" containerName="nova-cell1-conductor-db-sync" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071326 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3a6b3d-7933-45c0-934c-00846c0783d7" containerName="nova-cell1-conductor-db-sync" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071338 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="dnsmasq-dns" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071343 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="dnsmasq-dns" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071361 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-log" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071367 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-log" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071383 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" containerName="nova-manage" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071388 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" containerName="nova-manage" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.071409 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-api" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071417 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-api" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071591 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-log" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071602 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" containerName="nova-manage" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071616 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" containerName="dnsmasq-dns" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 
19:27:36.071624 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3a6b3d-7933-45c0-934c-00846c0783d7" containerName="nova-cell1-conductor-db-sync" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.071639 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" containerName="nova-api-api" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.072574 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.073480 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61\": container with ID starting with 99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61 not found: ID does not exist" containerID="99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.073511 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61"} err="failed to get container status \"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61\": rpc error: code = NotFound desc = could not find container \"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61\": container with ID starting with 99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61 not found: ID does not exist" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.073533 4779 scope.go:117] "RemoveContainer" containerID="97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.074631 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:27:36 crc kubenswrapper[4779]: E0929 19:27:36.075456 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d\": container with ID starting with 97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d not found: ID does not exist" containerID="97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.075507 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d"} err="failed to get container status \"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d\": rpc error: code = NotFound desc = could not find container \"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d\": container with ID starting with 97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d not found: ID does not exist" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.075532 4779 scope.go:117] "RemoveContainer" containerID="99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.077941 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61"} err="failed to get container status \"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61\": rpc error: code = NotFound desc = could not 
find container \"99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61\": container with ID starting with 99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61 not found: ID does not exist" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.077982 4779 scope.go:117] "RemoveContainer" containerID="97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.078917 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d"} err="failed to get container status \"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d\": rpc error: code = NotFound desc = could not find container \"97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d\": container with ID starting with 97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d not found: ID does not exist" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.090203 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.100240 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.101474 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.104070 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.109981 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.134884 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.134978 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.135050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.135087 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rntq\" (UniqueName: \"kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.135385 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: 
\"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.135416 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config\") pod \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\" (UID: \"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.139514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq" (OuterVolumeSpecName: "kube-api-access-7rntq") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "kube-api-access-7rntq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.183416 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.187718 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config" (OuterVolumeSpecName: "config") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.191602 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.192079 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.197894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" (UID: "b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240363 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxg96\" (UniqueName: \"kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240411 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240442 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240475 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240515 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj6mf\" (UniqueName: \"kubernetes.io/projected/87caa150-0994-48a8-816a-b6dccebf4092-kube-api-access-zj6mf\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240547 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240578 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240937 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.240969 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.241001 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 
19:27:36.241013 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.241028 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.241041 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rntq\" (UniqueName: \"kubernetes.io/projected/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703-kube-api-access-7rntq\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.346797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.346867 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347025 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxg96\" (UniqueName: \"kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347056 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347153 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347203 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj6mf\" (UniqueName: \"kubernetes.io/projected/87caa150-0994-48a8-816a-b6dccebf4092-kube-api-access-zj6mf\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.347824 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs\") 
pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.352752 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.355177 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.362952 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87caa150-0994-48a8-816a-b6dccebf4092-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.364848 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj6mf\" (UniqueName: \"kubernetes.io/projected/87caa150-0994-48a8-816a-b6dccebf4092-kube-api-access-zj6mf\") pod \"nova-cell1-conductor-0\" (UID: \"87caa150-0994-48a8-816a-b6dccebf4092\") " pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.365652 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.370046 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxg96\" (UniqueName: \"kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96\") pod \"nova-api-0\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.396204 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.517962 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.553515 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.559851 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs\") pod \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652265 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs\") pod \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652380 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data\") pod \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652517 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data\") pod \"da54bd62-a0d1-438e-b7c2-dae26d204540\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652543 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle\") pod \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652574 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr2zr\" (UniqueName: \"kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr\") pod \"da54bd62-a0d1-438e-b7c2-dae26d204540\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652601 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmqq7\" (UniqueName: \"kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7\") pod \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\" (UID: \"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.652654 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle\") pod \"da54bd62-a0d1-438e-b7c2-dae26d204540\" (UID: \"da54bd62-a0d1-438e-b7c2-dae26d204540\") " Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.660803 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr" (OuterVolumeSpecName: "kube-api-access-vr2zr") pod "da54bd62-a0d1-438e-b7c2-dae26d204540" (UID: "da54bd62-a0d1-438e-b7c2-dae26d204540"). InnerVolumeSpecName "kube-api-access-vr2zr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.663562 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs" (OuterVolumeSpecName: "logs") pod "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" (UID: "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.665802 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.665848 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr2zr\" (UniqueName: \"kubernetes.io/projected/da54bd62-a0d1-438e-b7c2-dae26d204540-kube-api-access-vr2zr\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.678542 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7" (OuterVolumeSpecName: "kube-api-access-bmqq7") pod "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" (UID: "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88"). InnerVolumeSpecName "kube-api-access-bmqq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.682078 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" (UID: "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.683196 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data" (OuterVolumeSpecName: "config-data") pod "da54bd62-a0d1-438e-b7c2-dae26d204540" (UID: "da54bd62-a0d1-438e-b7c2-dae26d204540"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.686241 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data" (OuterVolumeSpecName: "config-data") pod "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" (UID: "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.690235 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da54bd62-a0d1-438e-b7c2-dae26d204540" (UID: "da54bd62-a0d1-438e-b7c2-dae26d204540"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.714156 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" (UID: "92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767614 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767651 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767668 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmqq7\" (UniqueName: \"kubernetes.io/projected/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-kube-api-access-bmqq7\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767681 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da54bd62-a0d1-438e-b7c2-dae26d204540-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767692 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.767702 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.905189 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.974159 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" event={"ID":"b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703","Type":"ContainerDied","Data":"dfabc8ae8c47711a922efea5c1fafac05c7d962ec11901497f1ba77ae90c5a1d"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.974533 4779 scope.go:117] "RemoveContainer" containerID="1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.974661 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-nx69q" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.983775 4779 generic.go:334] "Generic (PLEG): container finished" podID="da54bd62-a0d1-438e-b7c2-dae26d204540" containerID="ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75" exitCode=0 Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.983896 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.983891 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da54bd62-a0d1-438e-b7c2-dae26d204540","Type":"ContainerDied","Data":"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.984033 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"da54bd62-a0d1-438e-b7c2-dae26d204540","Type":"ContainerDied","Data":"5eb4c2937f56807a68144dc34e69402cbb0b7cda5dadf24e6ddfcda95cf046e3"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989085 4779 generic.go:334] "Generic (PLEG): container finished" podID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerID="631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" exitCode=0 Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989369 4779 generic.go:334] "Generic (PLEG): container finished" podID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerID="1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" exitCode=143 Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989150 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989140 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerDied","Data":"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989713 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerDied","Data":"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.989731 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88","Type":"ContainerDied","Data":"590ca9dc4747fab9297cb348341ea0cbc9f4cfac7deac52b4004ca5eb8b8806c"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.992472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerStarted","Data":"a41ee5aed87fad2bef4f11d37c69417d37a2d73c1edf22efd65a3c29ad982889"} Sep 29 19:27:36 crc kubenswrapper[4779]: I0929 19:27:36.999434 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.013515 4779 scope.go:117] "RemoveContainer" containerID="ef1d492997760e0a5a2febf8aec640ad661aef8d273d66648e3ead1e77baa80a" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.034502 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.056085 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.071043 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.079567 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-nx69q"] Sep 29 19:27:37 crc kubenswrapper[4779]: 
I0929 19:27:37.087822 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.088234 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-log" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088255 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-log" Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.088282 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-metadata" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088290 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-metadata" Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.088340 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da54bd62-a0d1-438e-b7c2-dae26d204540" containerName="nova-scheduler-scheduler" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088350 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="da54bd62-a0d1-438e-b7c2-dae26d204540" containerName="nova-scheduler-scheduler" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088577 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-metadata" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088609 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" containerName="nova-metadata-log" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.088626 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="da54bd62-a0d1-438e-b7c2-dae26d204540" containerName="nova-scheduler-scheduler" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.089906 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.090926 4779 scope.go:117] "RemoveContainer" containerID="ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.092373 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.093414 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.124199 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.131580 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.157450 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.173591 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.174790 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.177374 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.192466 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.196545 4779 scope.go:117] "RemoveContainer" containerID="ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75" Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.201187 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75\": container with ID starting with ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75 not found: ID does not exist" containerID="ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.201227 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75"} err="failed to get container status \"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75\": rpc error: code = NotFound desc = could not find container \"ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75\": container with ID starting with ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75 not found: ID does not exist" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.201250 4779 scope.go:117] "RemoveContainer" containerID="631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281006 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281057 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74j6d\" (UniqueName: \"kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281088 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281111 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281127 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281157 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6d4c\" (UniqueName: \"kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281205 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.281271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.299376 4779 scope.go:117] "RemoveContainer" containerID="1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.314826 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382700 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382729 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74j6d\" (UniqueName: \"kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382758 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382782 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 
crc kubenswrapper[4779]: I0929 19:27:37.382799 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6d4c\" (UniqueName: \"kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.382872 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.383502 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.388213 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.388735 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.389031 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.389860 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.391824 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.399086 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74j6d\" (UniqueName: \"kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d\") pod \"nova-scheduler-0\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") " 
pod="openstack/nova-scheduler-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.405851 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6d4c\" (UniqueName: \"kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c\") pod \"nova-metadata-0\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.442822 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.458268 4779 scope.go:117] "RemoveContainer" containerID="631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.458977 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9\": container with ID starting with 631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9 not found: ID does not exist" containerID="631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.459072 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9"} err="failed to get container status \"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9\": rpc error: code = NotFound desc = could not find container \"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9\": container with ID starting with 631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9 not found: ID does not exist" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.459145 4779 scope.go:117] "RemoveContainer" containerID="1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.459926 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503\": container with ID starting with 1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503 not found: ID does not exist" containerID="1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.460003 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503"} err="failed to get container status \"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503\": rpc error: code = NotFound desc = could not find container \"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503\": container with ID starting with 1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503 not found: ID does not exist" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.460031 4779 scope.go:117] "RemoveContainer" containerID="631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.460473 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9"} err="failed to get container status 
\"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9\": rpc error: code = NotFound desc = could not find container \"631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9\": container with ID starting with 631bc481834079f3afe6a370d9ddbbf57e39abfd564144c0d70300fe7e712ac9 not found: ID does not exist" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.460540 4779 scope.go:117] "RemoveContainer" containerID="1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503" Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.461075 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503"} err="failed to get container status \"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503\": rpc error: code = NotFound desc = could not find container \"1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503\": container with ID starting with 1ffcf561a7a1ecbd202e37769ddebaf117fb29939b84b85de7d27e08a93dd503 not found: ID does not exist" Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.520930 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e3a6b3d_7933_45c0_934c_00846c0783d7.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e3a6b3d_7933_45c0_934c_00846c0783d7.slice: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.521427 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-5eb4c2937f56807a68144dc34e69402cbb0b7cda5dadf24e6ddfcda95cf046e3": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-5eb4c2937f56807a68144dc34e69402cbb0b7cda5dadf24e6ddfcda95cf046e3: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.521507 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-d317abe7e8c3d4afc8f8111156a2b0653c7497997a38023159b98f0bae634a4c": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-d317abe7e8c3d4afc8f8111156a2b0653c7497997a38023159b98f0bae634a4c: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.521551 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-c3f429a0fd792ddcf9f9953900bd9eca404a5430215c31b8cfec58982901ee8d": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-c3f429a0fd792ddcf9f9953900bd9eca404a5430215c31b8cfec58982901ee8d: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.521769 4779 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbac34d02_e097_4b82_afda_9b2d885c6fa4.slice/crio-conmon-acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbac34d02_e097_4b82_afda_9b2d885c6fa4.slice/crio-conmon-acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.521817 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbac34d02_e097_4b82_afda_9b2d885c6fa4.slice/crio-acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbac34d02_e097_4b82_afda_9b2d885c6fa4.slice/crio-acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522166 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-conmon-97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-conmon-97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522227 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-conmon-a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-conmon-a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522268 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-conmon-b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-conmon-b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522296 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-conmon-ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-conmon-ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: 
W0929 19:27:37.522354 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-97ee4126c9ec4201626463dce109def16f68fb077c4f49da0fcbc0d6a12a737d.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522380 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522409 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice/crio-ba0ab139f0b098b085398c9159daa1e0a7ce62d4c53cc66a00a63f4c0b4c9a75.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522437 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd57dbe5_edf2_4209_aec4_255ef19013ff.slice/crio-b4bdefe484633d644d4fb6e23f38a95a3d0e5d19997622f8cbab1b2b9e5dbf32.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522466 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-conmon-1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-conmon-1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f.scope: no such file or directory Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522515 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-conmon-99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-conmon-99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61.scope: no such file or directory Sep 29 19:27:37 
Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522544 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8357f7c_f656_4c05_b8ab_cc93c82a1b7c.slice/crio-1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f.scope: no such file or directory
Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.522568 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-99f5ae8752e103200194b8828deb36d0cbdf83d662d24b97112332882c327f61.scope: no such file or directory
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.523730 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:27:37 crc kubenswrapper[4779]: W0929 19:27:37.530765 4779 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92729eb4_c37d_4cd9_ae70_2e7b2c3f2c88.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92729eb4_c37d_4cd9_ae70_2e7b2c3f2c88.slice: no such file or directory
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.790899 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88" path="/var/lib/kubelet/pods/92729eb4-c37d-4cd9-ae70-2e7b2c3f2c88/volumes"
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.791716 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703" path="/var/lib/kubelet/pods/b39d898a-ba9c-4d8f-96eb-6e7cb5bcf703/volumes"
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.792331 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88" path="/var/lib/kubelet/pods/d6ba8b5d-fee9-4010-b4ba-f6cf7b691a88/volumes"
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.793461 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da54bd62-a0d1-438e-b7c2-dae26d204540" path="/var/lib/kubelet/pods/da54bd62-a0d1-438e-b7c2-dae26d204540/volumes"
Sep 29 19:27:37 crc kubenswrapper[4779]: E0929 19:27:37.819115 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice/crio-1ae843fb89e96ef407e9da22be3ec3d1f475d1231893de904389b8a38491a35a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf8c83fc_5e0d_41f8_b1f6_f7004e73ec39.slice/crio-6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb39d898a_ba9c_4d8f_96eb_6e7cb5bcf703.slice/crio-1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5cb4dde_7de2_43a6_aeeb_92350ca1454d.slice/crio-50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda54bd62_a0d1_438e_b7c2_dae26d204540.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ffdc490_1007_41e8_a410_97a78d400395.slice/crio-884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5cb4dde_7de2_43a6_aeeb_92350ca1454d.slice/crio-conmon-50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf8c83fc_5e0d_41f8_b1f6_f7004e73ec39.slice/crio-conmon-6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb39d898a_ba9c_4d8f_96eb_6e7cb5bcf703.slice/crio-conmon-1be1062c512f6b7378ae42a72eb7f0e278c7a5b9b52463e15a34d91db5542732.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb39d898a_ba9c_4d8f_96eb_6e7cb5bcf703.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6ba8b5d_fee9_4010_b4ba_f6cf7b691a88.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb39d898a_ba9c_4d8f_96eb_6e7cb5bcf703.slice/crio-dfabc8ae8c47711a922efea5c1fafac05c7d962ec11901497f1ba77ae90c5a1d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf8c83fc_5e0d_41f8_b1f6_f7004e73ec39.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf8c83fc_5e0d_41f8_b1f6_f7004e73ec39.slice/crio-fcdfad0c61b61745b863c383cd7feb5ac71df46ad3424d573923cfb824d8ba01\": RecentStats: unable to find data in memory cache]"
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.911724 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:27:37 crc kubenswrapper[4779]: I0929 19:27:37.987274 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.001929 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerStarted","Data":"e01759e90c577b0a56e74c4f1687ad0b5c308b21fbbe7e331b891ddfd25f8078"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.002924 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"87caa150-0994-48a8-816a-b6dccebf4092","Type":"ContainerStarted","Data":"34e578f8e0584bf38b24f8ff87f0c1d3e304886004be26c1062d58f832e214fb"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.002948 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"87caa150-0994-48a8-816a-b6dccebf4092","Type":"ContainerStarted","Data":"0f5488f96b05786849be81b77e2be122f2b4a0196858a3c501a2ad4e0c958416"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.003476 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.006331 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerStarted","Data":"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.006446 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerStarted","Data":"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.009954 4779 generic.go:334] "Generic (PLEG): container finished" podID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerID="50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa" exitCode=137
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.010028 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerDied","Data":"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.010059 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5cb4dde-7de2-43a6-aeeb-92350ca1454d","Type":"ContainerDied","Data":"4209017d2fbbbf63c3ea13fb158acc61da6e482290360c442860b2d15a6327bc"}
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.010076 4779 scope.go:117] "RemoveContainer" containerID="50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.010011 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.022716 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.022695251 podStartE2EDuration="2.022695251s" podCreationTimestamp="2025-09-29 19:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:38.015715721 +0000 UTC m=+1168.900140821" watchObservedRunningTime="2025-09-29 19:27:38.022695251 +0000 UTC m=+1168.907120341"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.036823 4779 scope.go:117] "RemoveContainer" containerID="a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.036997 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.036986881 podStartE2EDuration="2.036986881s" podCreationTimestamp="2025-09-29 19:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:38.03217189 +0000 UTC m=+1168.916596990" watchObservedRunningTime="2025-09-29 19:27:38.036986881 +0000 UTC m=+1168.921411981"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.060624 4779 scope.go:117] "RemoveContainer" containerID="c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.074349 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.092454 4779 scope.go:117] "RemoveContainer" containerID="40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097232 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") "
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097338 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") "
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097384 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvzl4\" (UniqueName: \"kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") "
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097443 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") "
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097461 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") "
\"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097588 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.097649 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd\") pod \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\" (UID: \"a5cb4dde-7de2-43a6-aeeb-92350ca1454d\") " Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.098601 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.101699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.104343 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4" (OuterVolumeSpecName: "kube-api-access-nvzl4") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "kube-api-access-nvzl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.104841 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts" (OuterVolumeSpecName: "scripts") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.135429 4779 scope.go:117] "RemoveContainer" containerID="50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.135870 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa\": container with ID starting with 50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa not found: ID does not exist" containerID="50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.135932 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa"} err="failed to get container status \"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa\": rpc error: code = NotFound desc = could not find container \"50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa\": container with ID starting with 50616d334c27a1bb1e1d894426d7c3990a2083878d342630350e2e2f13127eaa not found: ID does not exist" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.135967 4779 scope.go:117] "RemoveContainer" containerID="a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.136237 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1\": container with ID starting with a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1 not found: ID does not exist" containerID="a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.136340 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1"} err="failed to get container status \"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1\": rpc error: code = NotFound desc = could not find container \"a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1\": container with ID starting with a9a614f9b9a82cead8707f438ea1ffcfcc3ad18f7a132e9b01015550a7a0c3f1 not found: ID does not exist" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.136411 4779 scope.go:117] "RemoveContainer" containerID="c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.136809 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd\": container with ID starting with c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd not found: ID does not exist" containerID="c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.136847 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd"} err="failed to get container status \"c097cdad9d4cff552d8123a6eb81980dfe8591805a605366e9cb9b66b052b3cd\": rpc error: code = NotFound desc = could not 
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.136870 4779 scope.go:117] "RemoveContainer" containerID="40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"
Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.137093 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab\": container with ID starting with 40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab not found: ID does not exist" containerID="40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.137200 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab"} err="failed to get container status \"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab\": rpc error: code = NotFound desc = could not find container \"40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab\": container with ID starting with 40ad0a556d671ba8415d1985960925c2f64c1b7936b55e388189890aa35cafab not found: ID does not exist"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.154613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.196250 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200523 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200548 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200556 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200565 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200576 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvzl4\" (UniqueName: \"kubernetes.io/projected/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-kube-api-access-nvzl4\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.200585 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.226105 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data" (OuterVolumeSpecName: "config-data") pod "a5cb4dde-7de2-43a6-aeeb-92350ca1454d" (UID: "a5cb4dde-7de2-43a6-aeeb-92350ca1454d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.302800 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb4dde-7de2-43a6-aeeb-92350ca1454d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.400621 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.408291 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427046 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.427600 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="proxy-httpd" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427624 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="proxy-httpd" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.427645 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-notification-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427656 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-notification-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.427672 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-central-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427680 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-central-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: E0929 19:27:38.427710 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="sg-core" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427719 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="sg-core" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427936 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="sg-core" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427955 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-notification-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427965 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="proxy-httpd" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.427981 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" containerName="ceilometer-central-agent" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.430176 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.432349 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.432634 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.440990 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505147 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4fv9\" (UniqueName: \"kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505500 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505556 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505579 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505605 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505669 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.505707 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-config-data\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.606850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0" Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.606907 4779 
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.606958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4fv9\" (UniqueName: \"kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.606976 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.607005 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.607027 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.607053 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.607590 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.607776 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.611134 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.612102 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.612982 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-config-data\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.622895 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.624014 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4fv9\" (UniqueName: \"kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9\") pod \"ceilometer-0\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") " pod="openstack/ceilometer-0"
Sep 29 19:27:38 crc kubenswrapper[4779]: I0929 19:27:38.759991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.023056 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerStarted","Data":"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df"}
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.023364 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerStarted","Data":"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3"}
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.025527 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"57f1ca09-c341-4f3c-a1e0-e9f27777601f","Type":"ContainerStarted","Data":"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"}
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.025549 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"57f1ca09-c341-4f3c-a1e0-e9f27777601f","Type":"ContainerStarted","Data":"79f43c83ed00301f5eeadc2aa21adef0d61e8c8b362dc69f769e41d0b1e7b8be"}
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.046084 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.046069543 podStartE2EDuration="2.046069543s" podCreationTimestamp="2025-09-29 19:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:39.0444963 +0000 UTC m=+1169.928921400" watchObservedRunningTime="2025-09-29 19:27:39.046069543 +0000 UTC m=+1169.930494633"
Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.065786 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.06576868 podStartE2EDuration="2.06576868s" podCreationTimestamp="2025-09-29 19:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:39.057915066 +0000 UTC m=+1169.942340166" watchObservedRunningTime="2025-09-29 19:27:39.06576868 +0000 UTC m=+1169.950193780"
Sep 29 19:27:39 crc kubenswrapper[4779]: W0929 19:27:39.226285 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1ced773_4819_4d6b_a4db_9d6bb5f6f9c3.slice/crio-a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91 WatchSource:0}: Error finding container a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91: Status 404 returned error can't find the container with id a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1ced773_4819_4d6b_a4db_9d6bb5f6f9c3.slice/crio-a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91 WatchSource:0}: Error finding container a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91: Status 404 returned error can't find the container with id a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91 Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.238483 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:27:39 crc kubenswrapper[4779]: I0929 19:27:39.784736 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5cb4dde-7de2-43a6-aeeb-92350ca1454d" path="/var/lib/kubelet/pods/a5cb4dde-7de2-43a6-aeeb-92350ca1454d/volumes" Sep 29 19:27:40 crc kubenswrapper[4779]: I0929 19:27:40.036472 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerStarted","Data":"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"} Sep 29 19:27:40 crc kubenswrapper[4779]: I0929 19:27:40.036545 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerStarted","Data":"a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91"} Sep 29 19:27:41 crc kubenswrapper[4779]: I0929 19:27:41.051951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerStarted","Data":"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"} Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.067742 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerStarted","Data":"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"} Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.315619 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.336409 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.443521 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.443603 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 19:27:42 crc kubenswrapper[4779]: I0929 19:27:42.525157 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 19:27:43 crc kubenswrapper[4779]: I0929 19:27:43.087996 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerStarted","Data":"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"} Sep 29 19:27:43 crc kubenswrapper[4779]: I0929 19:27:43.088610 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:27:43 crc kubenswrapper[4779]: I0929 19:27:43.108553 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 19:27:43 crc kubenswrapper[4779]: I0929 
19:27:43.126924 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.756417451 podStartE2EDuration="5.126902713s" podCreationTimestamp="2025-09-29 19:27:38 +0000 UTC" firstStartedPulling="2025-09-29 19:27:39.229364546 +0000 UTC m=+1170.113789656" lastFinishedPulling="2025-09-29 19:27:42.599849778 +0000 UTC m=+1173.484274918" observedRunningTime="2025-09-29 19:27:43.118955326 +0000 UTC m=+1174.003380436" watchObservedRunningTime="2025-09-29 19:27:43.126902713 +0000 UTC m=+1174.011327813" Sep 29 19:27:46 crc kubenswrapper[4779]: I0929 19:27:46.397347 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:27:46 crc kubenswrapper[4779]: I0929 19:27:46.398161 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:27:46 crc kubenswrapper[4779]: I0929 19:27:46.554731 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.113749 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-97n2m"] Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.115104 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.121005 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.121030 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.141724 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-97n2m"] Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.300459 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmlkb\" (UniqueName: \"kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.300597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.300872 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.300957 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " 
pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.402747 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.402858 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmlkb\" (UniqueName: \"kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.402923 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.402997 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.409535 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.409654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.410003 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.420853 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmlkb\" (UniqueName: \"kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb\") pod \"nova-cell1-cell-mapping-97n2m\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.444058 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.444165 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.453708 4779 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.480593 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.480624 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.524637 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.563625 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 19:27:47 crc kubenswrapper[4779]: I0929 19:27:47.940120 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-97n2m"] Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.146825 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-97n2m" event={"ID":"b0792c39-e57a-412f-96b3-c40605b6b146","Type":"ContainerStarted","Data":"ecb99029c6153550ca912aa5f524f76a477af64b2ac01c3ad77b4beb39652991"} Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.147263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-97n2m" event={"ID":"b0792c39-e57a-412f-96b3-c40605b6b146","Type":"ContainerStarted","Data":"87baa8b712cd6a39b8a8f4f5e4e1736935b4f952450e372bd1f63067a15f2646"} Sep 29 19:27:48 crc kubenswrapper[4779]: E0929 19:27:48.156978 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ffdc490_1007_41e8_a410_97a78d400395.slice/crio-884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5\": RecentStats: unable to find data in memory cache]" Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.167043 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-97n2m" podStartSLOduration=1.167025835 podStartE2EDuration="1.167025835s" podCreationTimestamp="2025-09-29 19:27:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:27:48.165922574 +0000 UTC m=+1179.050347684" watchObservedRunningTime="2025-09-29 19:27:48.167025835 +0000 UTC m=+1179.051450955" Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.182562 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.451582 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:27:48 crc kubenswrapper[4779]: I0929 19:27:48.458701 4779 
prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:27:53 crc kubenswrapper[4779]: I0929 19:27:53.205776 4779 generic.go:334] "Generic (PLEG): container finished" podID="b0792c39-e57a-412f-96b3-c40605b6b146" containerID="ecb99029c6153550ca912aa5f524f76a477af64b2ac01c3ad77b4beb39652991" exitCode=0 Sep 29 19:27:53 crc kubenswrapper[4779]: I0929 19:27:53.205975 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-97n2m" event={"ID":"b0792c39-e57a-412f-96b3-c40605b6b146","Type":"ContainerDied","Data":"ecb99029c6153550ca912aa5f524f76a477af64b2ac01c3ad77b4beb39652991"} Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.685384 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.873731 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data\") pod \"b0792c39-e57a-412f-96b3-c40605b6b146\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.873885 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmlkb\" (UniqueName: \"kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb\") pod \"b0792c39-e57a-412f-96b3-c40605b6b146\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.873996 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle\") pod \"b0792c39-e57a-412f-96b3-c40605b6b146\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.874134 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts\") pod \"b0792c39-e57a-412f-96b3-c40605b6b146\" (UID: \"b0792c39-e57a-412f-96b3-c40605b6b146\") " Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.879646 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb" (OuterVolumeSpecName: "kube-api-access-zmlkb") pod "b0792c39-e57a-412f-96b3-c40605b6b146" (UID: "b0792c39-e57a-412f-96b3-c40605b6b146"). InnerVolumeSpecName "kube-api-access-zmlkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.886608 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts" (OuterVolumeSpecName: "scripts") pod "b0792c39-e57a-412f-96b3-c40605b6b146" (UID: "b0792c39-e57a-412f-96b3-c40605b6b146"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.921465 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0792c39-e57a-412f-96b3-c40605b6b146" (UID: "b0792c39-e57a-412f-96b3-c40605b6b146"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.923225 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data" (OuterVolumeSpecName: "config-data") pod "b0792c39-e57a-412f-96b3-c40605b6b146" (UID: "b0792c39-e57a-412f-96b3-c40605b6b146"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.976584 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmlkb\" (UniqueName: \"kubernetes.io/projected/b0792c39-e57a-412f-96b3-c40605b6b146-kube-api-access-zmlkb\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.976748 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.976856 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:54 crc kubenswrapper[4779]: I0929 19:27:54.976948 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0792c39-e57a-412f-96b3-c40605b6b146-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.235075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-97n2m" event={"ID":"b0792c39-e57a-412f-96b3-c40605b6b146","Type":"ContainerDied","Data":"87baa8b712cd6a39b8a8f4f5e4e1736935b4f952450e372bd1f63067a15f2646"} Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.235117 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87baa8b712cd6a39b8a8f4f5e4e1736935b4f952450e372bd1f63067a15f2646" Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.235168 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-97n2m" Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.441790 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.442059 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-log" containerID="cri-o://07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be" gracePeriod=30 Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.442124 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-api" containerID="cri-o://0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71" gracePeriod=30 Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.497098 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.497388 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerName="nova-scheduler-scheduler" containerID="cri-o://a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789" gracePeriod=30 Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.506604 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.506837 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-log" containerID="cri-o://eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3" gracePeriod=30 Sep 29 19:27:55 crc kubenswrapper[4779]: I0929 19:27:55.506970 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-metadata" containerID="cri-o://48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df" gracePeriod=30 Sep 29 19:27:56 crc kubenswrapper[4779]: I0929 19:27:56.258578 4779 generic.go:334] "Generic (PLEG): container finished" podID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerID="eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3" exitCode=143 Sep 29 19:27:56 crc kubenswrapper[4779]: I0929 19:27:56.258901 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerDied","Data":"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3"} Sep 29 19:27:56 crc kubenswrapper[4779]: I0929 19:27:56.266450 4779 generic.go:334] "Generic (PLEG): container finished" podID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerID="07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be" exitCode=143 Sep 29 19:27:56 crc kubenswrapper[4779]: I0929 19:27:56.266745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerDied","Data":"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be"} Sep 29 19:27:57 crc kubenswrapper[4779]: E0929 19:27:57.528303 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:27:57 crc kubenswrapper[4779]: E0929 19:27:57.530615 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:27:57 crc kubenswrapper[4779]: E0929 19:27:57.532658 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 19:27:57 crc kubenswrapper[4779]: E0929 19:27:57.532710 4779 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerName="nova-scheduler-scheduler" Sep 29 19:27:58 crc kubenswrapper[4779]: E0929 19:27:58.478536 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ffdc490_1007_41e8_a410_97a78d400395.slice/crio-884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5\": RecentStats: unable to find data in memory cache]" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.089892 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.095571 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.246445 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs\") pod \"47bbd60f-e99d-48c8-94ba-908fc7162be3\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.246524 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs\") pod \"47bbd60f-e99d-48c8-94ba-908fc7162be3\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.246608 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle\") pod \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.246722 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data\") pod \"47bbd60f-e99d-48c8-94ba-908fc7162be3\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.246945 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxg96\" (UniqueName: \"kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96\") pod \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.247070 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle\") pod \"47bbd60f-e99d-48c8-94ba-908fc7162be3\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.247114 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data\") pod \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.247150 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs\") pod \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\" (UID: \"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.247260 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6d4c\" (UniqueName: \"kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c\") pod \"47bbd60f-e99d-48c8-94ba-908fc7162be3\" (UID: \"47bbd60f-e99d-48c8-94ba-908fc7162be3\") " Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.249285 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs" (OuterVolumeSpecName: "logs") pod "47bbd60f-e99d-48c8-94ba-908fc7162be3" (UID: 
"47bbd60f-e99d-48c8-94ba-908fc7162be3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.250053 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs" (OuterVolumeSpecName: "logs") pod "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" (UID: "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.265005 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c" (OuterVolumeSpecName: "kube-api-access-r6d4c") pod "47bbd60f-e99d-48c8-94ba-908fc7162be3" (UID: "47bbd60f-e99d-48c8-94ba-908fc7162be3"). InnerVolumeSpecName "kube-api-access-r6d4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.277928 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96" (OuterVolumeSpecName: "kube-api-access-pxg96") pod "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" (UID: "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942"). InnerVolumeSpecName "kube-api-access-pxg96". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.287051 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47bbd60f-e99d-48c8-94ba-908fc7162be3" (UID: "47bbd60f-e99d-48c8-94ba-908fc7162be3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297124 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data" (OuterVolumeSpecName: "config-data") pod "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" (UID: "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297732 4779 generic.go:334] "Generic (PLEG): container finished" podID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerID="48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df" exitCode=0 Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerDied","Data":"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df"} Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297835 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"47bbd60f-e99d-48c8-94ba-908fc7162be3","Type":"ContainerDied","Data":"e01759e90c577b0a56e74c4f1687ad0b5c308b21fbbe7e331b891ddfd25f8078"} Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297856 4779 scope.go:117] "RemoveContainer" containerID="48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.297992 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.301450 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" (UID: "7a6eecd5-c53f-4f79-b8d4-ffd8527ce942"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.302722 4779 generic.go:334] "Generic (PLEG): container finished" podID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerID="0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71" exitCode=0 Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.302855 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerDied","Data":"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71"} Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.302958 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a6eecd5-c53f-4f79-b8d4-ffd8527ce942","Type":"ContainerDied","Data":"a41ee5aed87fad2bef4f11d37c69417d37a2d73c1edf22efd65a3c29ad982889"} Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.302980 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.315457 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data" (OuterVolumeSpecName: "config-data") pod "47bbd60f-e99d-48c8-94ba-908fc7162be3" (UID: "47bbd60f-e99d-48c8-94ba-908fc7162be3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.321192 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "47bbd60f-e99d-48c8-94ba-908fc7162be3" (UID: "47bbd60f-e99d-48c8-94ba-908fc7162be3"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.349282 4779 scope.go:117] "RemoveContainer" containerID="eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354283 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47bbd60f-e99d-48c8-94ba-908fc7162be3-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354350 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354369 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354383 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354396 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxg96\" (UniqueName: \"kubernetes.io/projected/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-kube-api-access-pxg96\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354408 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47bbd60f-e99d-48c8-94ba-908fc7162be3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354420 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354430 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.354440 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6d4c\" (UniqueName: \"kubernetes.io/projected/47bbd60f-e99d-48c8-94ba-908fc7162be3-kube-api-access-r6d4c\") on node \"crc\" DevicePath \"\"" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.357444 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.373612 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381184 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.381673 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-metadata" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381691 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-metadata" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.381715 4779 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0792c39-e57a-412f-96b3-c40605b6b146" containerName="nova-manage" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381721 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0792c39-e57a-412f-96b3-c40605b6b146" containerName="nova-manage" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.381737 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-log" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381743 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-log" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.381751 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-api" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381757 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-api" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.381772 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-log" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381778 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-log" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381948 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-log" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381964 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-metadata" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381982 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" containerName="nova-api-api" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.381998 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0792c39-e57a-412f-96b3-c40605b6b146" containerName="nova-manage" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.382015 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" containerName="nova-metadata-log" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.382993 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.383047 4779 scope.go:117] "RemoveContainer" containerID="48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.384516 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df\": container with ID starting with 48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df not found: ID does not exist" containerID="48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.384632 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df"} err="failed to get container status \"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df\": rpc error: code = NotFound desc = could not find container \"48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df\": container with ID starting with 48fa555574bb15ab0721dd6cf2a3aa42037b9f87b0f0b65f12d7360609fa31df not found: ID does not exist" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.384720 4779 scope.go:117] "RemoveContainer" containerID="eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.385217 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3\": container with ID starting with eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3 not found: ID does not exist" containerID="eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.385248 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3"} err="failed to get container status \"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3\": rpc error: code = NotFound desc = could not find container \"eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3\": container with ID starting with eb9e617ee618549e7b758785be3a39731b7900b47f170efd4afa10ea66f280f3 not found: ID does not exist" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.385268 4779 scope.go:117] "RemoveContainer" containerID="0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.385419 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.389948 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.413050 4779 scope.go:117] "RemoveContainer" containerID="07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.434893 4779 scope.go:117] "RemoveContainer" containerID="0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.435373 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71\": container with ID starting with 0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71 not found: ID does not exist" containerID="0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.435413 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71"} err="failed to get container status \"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71\": rpc error: code = NotFound desc = could not find container \"0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71\": container with ID starting with 0821dddac90581c34ed5415260b05da4f7cc53737b66f9c5c95e3ec1f9d7fb71 not found: ID does not exist" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.435440 4779 scope.go:117] "RemoveContainer" containerID="07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be" Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.435714 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be\": container with ID starting with 07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be not found: ID does not exist" containerID="07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.435766 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be"} err="failed to get container status \"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be\": rpc error: code = NotFound desc = could not find container \"07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be\": container with ID starting with 07f48468de66627bca6c629bfc0a1dd138a75f36b207b497b23853f0c3e7d5be not found: ID does not exist" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.558091 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.558152 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.558184 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krnhq\" (UniqueName: \"kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.558466 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.621782 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.638236 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.651447 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.659621 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krnhq\" (UniqueName: \"kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.659710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.659799 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.659846 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.660414 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.666190 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.679376 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krnhq\" (UniqueName: \"kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.680025 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data\") pod \"nova-api-0\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " pod="openstack/nova-api-0" Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.691580 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 19:27:59 crc kubenswrapper[4779]: E0929 19:27:59.698431 4779 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerName="nova-scheduler-scheduler"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.698479 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerName="nova-scheduler-scheduler"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.698722 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerName="nova-scheduler-scheduler"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.699706 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.702988 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.704913 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.705089 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.715480 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.760578 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data\") pod \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") "
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.760672 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle\") pod \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") "
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.760761 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74j6d\" (UniqueName: \"kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d\") pod \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\" (UID: \"57f1ca09-c341-4f3c-a1e0-e9f27777601f\") "
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.764557 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d" (OuterVolumeSpecName: "kube-api-access-74j6d") pod "57f1ca09-c341-4f3c-a1e0-e9f27777601f" (UID: "57f1ca09-c341-4f3c-a1e0-e9f27777601f"). InnerVolumeSpecName "kube-api-access-74j6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.790549 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47bbd60f-e99d-48c8-94ba-908fc7162be3" path="/var/lib/kubelet/pods/47bbd60f-e99d-48c8-94ba-908fc7162be3/volumes"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.791618 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a6eecd5-c53f-4f79-b8d4-ffd8527ce942" path="/var/lib/kubelet/pods/7a6eecd5-c53f-4f79-b8d4-ffd8527ce942/volumes"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.808753 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data" (OuterVolumeSpecName: "config-data") pod "57f1ca09-c341-4f3c-a1e0-e9f27777601f" (UID: "57f1ca09-c341-4f3c-a1e0-e9f27777601f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.809829 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57f1ca09-c341-4f3c-a1e0-e9f27777601f" (UID: "57f1ca09-c341-4f3c-a1e0-e9f27777601f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.874692 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.874865 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.874926 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-config-data\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.874955 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-logs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.875033 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcpkz\" (UniqueName: \"kubernetes.io/projected/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-kube-api-access-hcpkz\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.875214 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74j6d\" (UniqueName: \"kubernetes.io/projected/57f1ca09-c341-4f3c-a1e0-e9f27777601f-kube-api-access-74j6d\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.875235 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.875248 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f1ca09-c341-4f3c-a1e0-e9f27777601f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.976822 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.976922 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.976947 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-config-data\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.976965 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-logs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.976998 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcpkz\" (UniqueName: \"kubernetes.io/projected/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-kube-api-access-hcpkz\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.978446 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-logs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.981066 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.981424 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-config-data\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.981946 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:27:59 crc kubenswrapper[4779]: I0929 19:27:59.994654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcpkz\" (UniqueName: \"kubernetes.io/projected/5f5393b3-8bf7-4578-98c9-2323a8ec7ea6-kube-api-access-hcpkz\") pod \"nova-metadata-0\" (UID: \"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6\") " pod="openstack/nova-metadata-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.137504 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: W0929 19:28:00.139024 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1327ac7_e940_435f_b344_f03e2c406800.slice/crio-7e71dfdb50f23a30d8eaf745259e08c91b7be23a0fef2b53e824a915bc81497b WatchSource:0}: Error finding container 7e71dfdb50f23a30d8eaf745259e08c91b7be23a0fef2b53e824a915bc81497b: Status 404 returned error can't find the container with id 7e71dfdb50f23a30d8eaf745259e08c91b7be23a0fef2b53e824a915bc81497b
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.211286 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.329094 4779 generic.go:334] "Generic (PLEG): container finished" podID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789" exitCode=0
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.329336 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.329355 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"57f1ca09-c341-4f3c-a1e0-e9f27777601f","Type":"ContainerDied","Data":"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"}
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.329702 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"57f1ca09-c341-4f3c-a1e0-e9f27777601f","Type":"ContainerDied","Data":"79f43c83ed00301f5eeadc2aa21adef0d61e8c8b362dc69f769e41d0b1e7b8be"}
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.329754 4779 scope.go:117] "RemoveContainer" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.335137 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerStarted","Data":"7e71dfdb50f23a30d8eaf745259e08c91b7be23a0fef2b53e824a915bc81497b"}
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.375400 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.398624 4779 scope.go:117] "RemoveContainer" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.398669 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: E0929 19:28:00.399176 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789\": container with ID starting with a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789 not found: ID does not exist" containerID="a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.399203 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789"} err="failed to get container status \"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789\": rpc error: code = NotFound desc = could not find container \"a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789\": container with ID starting with a15cd7424bab1deb21019137d5fa5cbaf35ed4c9e748e3a29c86bf966b587789 not found: ID does not exist"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.410273 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.411652 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.415038 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.438213 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.586952 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-config-data\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.587365 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.587452 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv5vk\" (UniqueName: \"kubernetes.io/projected/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-kube-api-access-lv5vk\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.674149 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 19:28:00 crc kubenswrapper[4779]: W0929 19:28:00.675886 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f5393b3_8bf7_4578_98c9_2323a8ec7ea6.slice/crio-074e71d477692cce13136f1e2c806fb09e9bf9e978153c09bb06915c0b41758c WatchSource:0}: Error finding container 074e71d477692cce13136f1e2c806fb09e9bf9e978153c09bb06915c0b41758c: Status 404 returned error can't find the container with id 074e71d477692cce13136f1e2c806fb09e9bf9e978153c09bb06915c0b41758c
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.690531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.690845 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv5vk\" (UniqueName: \"kubernetes.io/projected/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-kube-api-access-lv5vk\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.690925 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-config-data\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.694635 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-config-data\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.695217 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.706353 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv5vk\" (UniqueName: \"kubernetes.io/projected/26aa6a8b-e675-4b3e-aa17-9ed17b49c907-kube-api-access-lv5vk\") pod \"nova-scheduler-0\" (UID: \"26aa6a8b-e675-4b3e-aa17-9ed17b49c907\") " pod="openstack/nova-scheduler-0"
Sep 29 19:28:00 crc kubenswrapper[4779]: I0929 19:28:00.732568 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 19:28:01 crc kubenswrapper[4779]: W0929 19:28:01.178618 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26aa6a8b_e675_4b3e_aa17_9ed17b49c907.slice/crio-25c03edf282e97b262c968ceb3f5a01069f237a50fb859180f6ca44e6fcbc2d6 WatchSource:0}: Error finding container 25c03edf282e97b262c968ceb3f5a01069f237a50fb859180f6ca44e6fcbc2d6: Status 404 returned error can't find the container with id 25c03edf282e97b262c968ceb3f5a01069f237a50fb859180f6ca44e6fcbc2d6
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.180708 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.362422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerStarted","Data":"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.362500 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerStarted","Data":"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.364230 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6","Type":"ContainerStarted","Data":"e6691b3010efcf4cb8267a6db84ac1a6e1ec2b68a9b0cf470a344731b3f1a5a5"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.364279 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6","Type":"ContainerStarted","Data":"fcf7c0281eef6f470691a3f8d5abfb1f99b5cf2a0fb2ee95281200f5fd8a026e"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.364294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f5393b3-8bf7-4578-98c9-2323a8ec7ea6","Type":"ContainerStarted","Data":"074e71d477692cce13136f1e2c806fb09e9bf9e978153c09bb06915c0b41758c"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.367648 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"26aa6a8b-e675-4b3e-aa17-9ed17b49c907","Type":"ContainerStarted","Data":"77214824682a9a973bbdaaae84c326358fa6631d42e23850bfd1647852108a0f"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.367708 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"26aa6a8b-e675-4b3e-aa17-9ed17b49c907","Type":"ContainerStarted","Data":"25c03edf282e97b262c968ceb3f5a01069f237a50fb859180f6ca44e6fcbc2d6"}
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.386245 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.386225681 podStartE2EDuration="2.386225681s" podCreationTimestamp="2025-09-29 19:27:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:28:01.385680026 +0000 UTC m=+1192.270105136" watchObservedRunningTime="2025-09-29 19:28:01.386225681 +0000 UTC m=+1192.270650781"
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.412858 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.412837267 podStartE2EDuration="1.412837267s" podCreationTimestamp="2025-09-29 19:28:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:28:01.402790783 +0000 UTC m=+1192.287215883" watchObservedRunningTime="2025-09-29 19:28:01.412837267 +0000 UTC m=+1192.297262367"
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.425167 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.425150734 podStartE2EDuration="2.425150734s" podCreationTimestamp="2025-09-29 19:27:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:28:01.421112593 +0000 UTC m=+1192.305537693" watchObservedRunningTime="2025-09-29 19:28:01.425150734 +0000 UTC m=+1192.309575834"
Sep 29 19:28:01 crc kubenswrapper[4779]: I0929 19:28:01.777095 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f1ca09-c341-4f3c-a1e0-e9f27777601f" path="/var/lib/kubelet/pods/57f1ca09-c341-4f3c-a1e0-e9f27777601f/volumes"
Sep 29 19:28:05 crc kubenswrapper[4779]: I0929 19:28:05.211680 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 19:28:05 crc kubenswrapper[4779]: I0929 19:28:05.212651 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 19:28:05 crc kubenswrapper[4779]: I0929 19:28:05.732893 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 19:28:08 crc kubenswrapper[4779]: E0929 19:28:08.746228 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ffdc490_1007_41e8_a410_97a78d400395.slice/crio-884b7083c272788693584552bc8512f7ab999b2bfeffc1ead266268aaa1801d5\": RecentStats: unable to find data in memory cache]"
Sep 29 19:28:08 crc kubenswrapper[4779]: I0929 19:28:08.771902 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 29 19:28:09 crc kubenswrapper[4779]: I0929 19:28:09.703889 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 19:28:09 crc kubenswrapper[4779]: I0929 19:28:09.704261 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.212267 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.212412 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.733610 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.780711 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.787592 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:28:10 crc kubenswrapper[4779]: I0929 19:28:10.787625 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:28:11 crc kubenswrapper[4779]: I0929 19:28:11.230616 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f5393b3-8bf7-4578-98c9-2323a8ec7ea6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:28:11 crc kubenswrapper[4779]: I0929 19:28:11.230667 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f5393b3-8bf7-4578-98c9-2323a8ec7ea6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 19:28:11 crc kubenswrapper[4779]: I0929 19:28:11.512550 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 19:28:12 crc kubenswrapper[4779]: I0929 19:28:12.672096 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:12 crc kubenswrapper[4779]: I0929 19:28:12.672607 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" containerName="kube-state-metrics" containerID="cri-o://c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137" gracePeriod=30
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.147676 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.278748 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcck6\" (UniqueName: \"kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6\") pod \"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c\" (UID: \"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c\") "
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.294626 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6" (OuterVolumeSpecName: "kube-api-access-tcck6") pod "ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" (UID: "ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c"). InnerVolumeSpecName "kube-api-access-tcck6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.381339 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcck6\" (UniqueName: \"kubernetes.io/projected/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c-kube-api-access-tcck6\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.494054 4779 generic.go:334] "Generic (PLEG): container finished" podID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" containerID="c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137" exitCode=2
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.494097 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c","Type":"ContainerDied","Data":"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"}
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.494111 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.494135 4779 scope.go:117] "RemoveContainer" containerID="c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.494124 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c","Type":"ContainerDied","Data":"2101be65d6b9c39d2907e20c2c18e9fa2bffc773d36a6108e9cfec6997234996"}
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.517340 4779 scope.go:117] "RemoveContainer" containerID="c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"
Sep 29 19:28:13 crc kubenswrapper[4779]: E0929 19:28:13.517852 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137\": container with ID starting with c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137 not found: ID does not exist" containerID="c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.517886 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137"} err="failed to get container status \"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137\": rpc error: code = NotFound desc = could not find container \"c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137\": container with ID starting with c27cc2552e3e4e6c5f72414cc36b943039f9ed94926d9febe0b7c874be756137 not found: ID does not exist"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.527589 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.537524 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.548185 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:13 crc kubenswrapper[4779]: E0929 19:28:13.550804 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" containerName="kube-state-metrics"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.550840 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" containerName="kube-state-metrics"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.551087 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" containerName="kube-state-metrics"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.552486 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.554452 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.554522 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.562791 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.686300 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.686627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.686811 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.686832 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjxxz\" (UniqueName: \"kubernetes.io/projected/43945a37-324b-4e37-a960-d92da8f5e56f-kube-api-access-sjxxz\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.777231 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c" path="/var/lib/kubelet/pods/ebdb9bba-7f1b-4b21-b4e8-df31d3a8684c/volumes"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.785513 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.785574 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.788570 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.788608 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjxxz\" (UniqueName: \"kubernetes.io/projected/43945a37-324b-4e37-a960-d92da8f5e56f-kube-api-access-sjxxz\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.788648 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.788760 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.794750 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.795849 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.801947 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43945a37-324b-4e37-a960-d92da8f5e56f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.833784 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjxxz\" (UniqueName: \"kubernetes.io/projected/43945a37-324b-4e37-a960-d92da8f5e56f-kube-api-access-sjxxz\") pod \"kube-state-metrics-0\" (UID: \"43945a37-324b-4e37-a960-d92da8f5e56f\") " pod="openstack/kube-state-metrics-0"
Sep 29 19:28:13 crc kubenswrapper[4779]: I0929 19:28:13.869880 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.356224 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.421784 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.422041 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-central-agent" containerID="cri-o://876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b" gracePeriod=30
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.422157 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-notification-agent" containerID="cri-o://333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518" gracePeriod=30
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.422170 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="sg-core" containerID="cri-o://24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f" gracePeriod=30
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.422291 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="proxy-httpd" containerID="cri-o://222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6" gracePeriod=30
Sep 29 19:28:14 crc kubenswrapper[4779]: I0929 19:28:14.504759 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"43945a37-324b-4e37-a960-d92da8f5e56f","Type":"ContainerStarted","Data":"44a4985f90d08022e344bd24cbdfdfb56ba79eb0ee31c95ffd2284c15cc0b2a1"}
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.519817 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerID="222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6" exitCode=0
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.520099 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerID="24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f" exitCode=2
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.520111 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerID="876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b" exitCode=0
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.519889 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerDied","Data":"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"}
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.520236 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerDied","Data":"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"}
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.520273 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerDied","Data":"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"}
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.524880 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"43945a37-324b-4e37-a960-d92da8f5e56f","Type":"ContainerStarted","Data":"0c7a7594094428403057c05fa493ebaf2514885ff672cfe9da0b8833a0272d8c"}
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.525950 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Sep 29 19:28:15 crc kubenswrapper[4779]: I0929 19:28:15.551727 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.209237639 podStartE2EDuration="2.551711306s" podCreationTimestamp="2025-09-29 19:28:13 +0000 UTC" firstStartedPulling="2025-09-29 19:28:14.35629951 +0000 UTC m=+1205.240724610" lastFinishedPulling="2025-09-29 19:28:14.698773187 +0000 UTC m=+1205.583198277" observedRunningTime="2025-09-29 19:28:15.547061829 +0000 UTC m=+1206.431486949" watchObservedRunningTime="2025-09-29 19:28:15.551711306 +0000 UTC m=+1206.436136406"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.478684 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541156 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541213 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4fv9\" (UniqueName: \"kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541298 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-config-data\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541348 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541451 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.541496 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml\") pod \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\" (UID: \"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3\") "
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.548350 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.549553 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.552116 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9" (OuterVolumeSpecName: "kube-api-access-f4fv9") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "kube-api-access-f4fv9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.558107 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts" (OuterVolumeSpecName: "scripts") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.576207 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerID="333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518" exitCode=0
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.576604 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.576869 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerDied","Data":"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"}
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.576938 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3","Type":"ContainerDied","Data":"a1fdca301a1935a8863752f28ca0d524a4bc151f8c6a2cc1c24d605f74262c91"}
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.576961 4779 scope.go:117] "RemoveContainer" containerID="222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.590481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.643680 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.643713 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.643725 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.643737 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.643747 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4fv9\" (UniqueName: \"kubernetes.io/projected/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-kube-api-access-f4fv9\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.655502 4779 scope.go:117] "RemoveContainer" containerID="24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.655598 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.673778 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-config-data" (OuterVolumeSpecName: "config-data") pod "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" (UID: "e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.676027 4779 scope.go:117] "RemoveContainer" containerID="333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.694009 4779 scope.go:117] "RemoveContainer" containerID="876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.710968 4779 scope.go:117] "RemoveContainer" containerID="222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.711443 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6\": container with ID starting with 222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6 not found: ID does not exist" containerID="222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.711488 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6"} err="failed to get container status \"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6\": rpc error: code = NotFound desc = could not find container \"222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6\": container with ID starting with 222dd839a81f67c1741614289d6928adfa418e447c353ec41fc0e333993ffed6 not found: ID does not exist"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.711515 4779 scope.go:117] "RemoveContainer" containerID="24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.711805 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f\": container with ID starting with 24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f not found: ID does not exist" containerID="24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.711834 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f"} err="failed to get container status \"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f\": rpc error: code = NotFound desc = could not find container \"24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f\": container with ID starting with 24e553dace1be1579dee12d64d97573c46bd88a4704e25d478496a8f6f29922f not found: ID does not exist"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.711858 4779 scope.go:117] "RemoveContainer" containerID="333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.712123 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518\": container with ID starting with 333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518 not found: ID does not exist" containerID="333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.712163 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518"} err="failed to get container status \"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518\": rpc error: code = NotFound desc = could not find container \"333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518\": container with ID starting with 333605364d7ef98e1636f60d32c381742e7745c347d1ee937b0d684bcbf59518 not found: ID does not exist"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.712191 4779 scope.go:117] "RemoveContainer" containerID="876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.712505 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b\": container with ID starting with 876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b not found: ID does not exist" containerID="876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.712606 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b"} err="failed to get container status \"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b\": rpc error: code = NotFound desc = could not find container \"876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b\": container with ID starting with 876ec120cb2f08d351d494a427e9edb1e9a469f67fffe4b16724e3f5909e637b not found: ID does not exist"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.745678 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.745708 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.938588 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.959528 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.984696 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.985088 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="sg-core"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985104 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="sg-core"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.985131 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-central-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985138 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-central-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.985149 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="proxy-httpd"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985155 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="proxy-httpd"
Sep 29 19:28:16 crc kubenswrapper[4779]: E0929 19:28:16.985162 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-notification-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985168 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-notification-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985469 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-central-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985499 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="sg-core"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985512 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="proxy-httpd"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.985521 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" containerName="ceilometer-notification-agent"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.987185 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.989951 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.990746 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 19:28:16 crc kubenswrapper[4779]: I0929 19:28:16.991056 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.000155 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.055942 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056040 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056292 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056379 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056571 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056690 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.056815 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx74d\" (UniqueName: \"kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.159864 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx74d\" (UniqueName: \"kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160012 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160065 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160157 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160226 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160282 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160465 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160582 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160617 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.160975 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.166146 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.166300 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.166571 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.166972 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.167296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.180702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx74d\" (UniqueName: \"kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d\") pod \"ceilometer-0\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " pod="openstack/ceilometer-0"
Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.313076 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.778365 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3" path="/var/lib/kubelet/pods/e1ced773-4819-4d6b-a4db-9d6bb5f6f9c3/volumes" Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.787085 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:17 crc kubenswrapper[4779]: W0929 19:28:17.794436 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod838e3b9e_959b_4b60_ba2b_4a81f2449f60.slice/crio-2cbaafe407b753cf950147fa36c70340351b916e0e0121e551afca5391f822c2 WatchSource:0}: Error finding container 2cbaafe407b753cf950147fa36c70340351b916e0e0121e551afca5391f822c2: Status 404 returned error can't find the container with id 2cbaafe407b753cf950147fa36c70340351b916e0e0121e551afca5391f822c2 Sep 29 19:28:17 crc kubenswrapper[4779]: I0929 19:28:17.797563 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:28:18 crc kubenswrapper[4779]: I0929 19:28:18.601207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerStarted","Data":"2cbaafe407b753cf950147fa36c70340351b916e0e0121e551afca5391f822c2"} Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.619075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerStarted","Data":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.620528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerStarted","Data":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.709294 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.710066 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.710289 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:28:19 crc kubenswrapper[4779]: I0929 19:28:19.712812 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.218150 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.223861 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.233032 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.627822 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerStarted","Data":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} Sep 29 19:28:20 crc kubenswrapper[4779]: 
I0929 19:28:20.628542 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.634276 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.635295 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.827095 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.828962 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.839177 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.927808 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.927861 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.927916 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.927960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.927991 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv5cx\" (UniqueName: \"kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:20 crc kubenswrapper[4779]: I0929 19:28:20.928025 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029487 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029566 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029626 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv5cx\" (UniqueName: \"kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.029827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.030879 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.030897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.031109 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.031142 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.031219 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.050930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv5cx\" (UniqueName: \"kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx\") pod \"dnsmasq-dns-59cf4bdb65-mzp42\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.152765 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:21 crc kubenswrapper[4779]: W0929 19:28:21.672919 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac6039d1_710e_4a9b_b96b_c20dd2c9937f.slice/crio-213ee505c47ceefc698503d5728b588fa9d66b82fdcd56e525a48be7439c9a39 WatchSource:0}: Error finding container 213ee505c47ceefc698503d5728b588fa9d66b82fdcd56e525a48be7439c9a39: Status 404 returned error can't find the container with id 213ee505c47ceefc698503d5728b588fa9d66b82fdcd56e525a48be7439c9a39 Sep 29 19:28:21 crc kubenswrapper[4779]: I0929 19:28:21.674118 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.660289 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerStarted","Data":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.660934 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.664771 4779 generic.go:334] "Generic (PLEG): container finished" podID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerID="0a15989b4f9b84fe7d394d4b3fd8d9d39c5f8f3e95fea9b6a9dd178d5b7a90eb" exitCode=0 Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.664889 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" event={"ID":"ac6039d1-710e-4a9b-b96b-c20dd2c9937f","Type":"ContainerDied","Data":"0a15989b4f9b84fe7d394d4b3fd8d9d39c5f8f3e95fea9b6a9dd178d5b7a90eb"} Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.664997 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" event={"ID":"ac6039d1-710e-4a9b-b96b-c20dd2c9937f","Type":"ContainerStarted","Data":"213ee505c47ceefc698503d5728b588fa9d66b82fdcd56e525a48be7439c9a39"} Sep 29 19:28:22 crc kubenswrapper[4779]: I0929 19:28:22.713113 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.109227622 podStartE2EDuration="6.713091514s" podCreationTimestamp="2025-09-29 19:28:16 +0000 UTC" firstStartedPulling="2025-09-29 
19:28:17.797330087 +0000 UTC m=+1208.681755197" lastFinishedPulling="2025-09-29 19:28:21.401193969 +0000 UTC m=+1212.285619089" observedRunningTime="2025-09-29 19:28:22.685640555 +0000 UTC m=+1213.570065655" watchObservedRunningTime="2025-09-29 19:28:22.713091514 +0000 UTC m=+1213.597516614" Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.108076 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.675804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" event={"ID":"ac6039d1-710e-4a9b-b96b-c20dd2c9937f","Type":"ContainerStarted","Data":"9b603cf1ac3e2424b11fe2c76a37312176ae2d390cb29bb7cdebde69ea689d1f"} Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.676330 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-log" containerID="cri-o://1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18" gracePeriod=30 Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.676341 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-api" containerID="cri-o://a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b" gracePeriod=30 Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.701374 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" podStartSLOduration=3.701359628 podStartE2EDuration="3.701359628s" podCreationTimestamp="2025-09-29 19:28:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:28:23.695071046 +0000 UTC m=+1214.579496146" watchObservedRunningTime="2025-09-29 19:28:23.701359628 +0000 UTC m=+1214.585784728" Sep 29 19:28:23 crc kubenswrapper[4779]: I0929 19:28:23.879556 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.256415 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.685759 4779 generic.go:334] "Generic (PLEG): container finished" podID="b1327ac7-e940-435f-b344-f03e2c406800" containerID="1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18" exitCode=143 Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.685844 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerDied","Data":"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18"} Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.686046 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-central-agent" containerID="cri-o://31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" gracePeriod=30 Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.686160 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="proxy-httpd" 
containerID="cri-o://936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" gracePeriod=30 Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.686221 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="sg-core" containerID="cri-o://0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" gracePeriod=30 Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.686267 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-notification-agent" containerID="cri-o://3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" gracePeriod=30 Sep 29 19:28:24 crc kubenswrapper[4779]: I0929 19:28:24.686719 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.551443 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634035 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx74d\" (UniqueName: \"kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634181 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634257 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634409 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634464 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634497 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634516 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data\") pod 
\"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.634546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml\") pod \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\" (UID: \"838e3b9e-959b-4b60-ba2b-4a81f2449f60\") " Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.635436 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.636187 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.639492 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts" (OuterVolumeSpecName: "scripts") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.639677 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d" (OuterVolumeSpecName: "kube-api-access-vx74d") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "kube-api-access-vx74d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.667007 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.688451 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697335 4779 generic.go:334] "Generic (PLEG): container finished" podID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" exitCode=0 Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697378 4779 generic.go:334] "Generic (PLEG): container finished" podID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" exitCode=2 Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697395 4779 generic.go:334] "Generic (PLEG): container finished" podID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" exitCode=0 Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697409 4779 generic.go:334] "Generic (PLEG): container finished" podID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" exitCode=0 Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697393 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697376 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerDied","Data":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697491 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerDied","Data":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697520 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerDied","Data":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697541 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerDied","Data":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697563 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"838e3b9e-959b-4b60-ba2b-4a81f2449f60","Type":"ContainerDied","Data":"2cbaafe407b753cf950147fa36c70340351b916e0e0121e551afca5391f822c2"} Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.697594 4779 scope.go:117] "RemoveContainer" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.717892 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.722694 4779 scope.go:117] "RemoveContainer" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737352 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx74d\" (UniqueName: \"kubernetes.io/projected/838e3b9e-959b-4b60-ba2b-4a81f2449f60-kube-api-access-vx74d\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737405 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737427 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737447 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737467 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737485 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/838e3b9e-959b-4b60-ba2b-4a81f2449f60-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.737502 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.747562 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data" (OuterVolumeSpecName: "config-data") pod "838e3b9e-959b-4b60-ba2b-4a81f2449f60" (UID: "838e3b9e-959b-4b60-ba2b-4a81f2449f60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.755328 4779 scope.go:117] "RemoveContainer" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.787396 4779 scope.go:117] "RemoveContainer" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.840129 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838e3b9e-959b-4b60-ba2b-4a81f2449f60-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.887239 4779 scope.go:117] "RemoveContainer" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: E0929 19:28:25.889944 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": container with ID starting with 936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f not found: ID does not exist" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.890039 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} err="failed to get container status \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": rpc error: code = NotFound desc = could not find container \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": container with ID starting with 936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.890119 4779 scope.go:117] "RemoveContainer" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: E0929 19:28:25.890767 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": container with ID starting with 0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14 not found: ID does not exist" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.890827 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} err="failed to get container status \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": rpc error: code = NotFound desc = could not find container \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": container with ID starting with 0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.890860 4779 scope.go:117] "RemoveContainer" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: E0929 19:28:25.891417 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": container with ID starting with 3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023 not found: ID does not exist" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.891468 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} err="failed to get container status \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": rpc error: code = NotFound desc = could not find container \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": container with ID starting with 3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.891503 4779 scope.go:117] "RemoveContainer" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: E0929 19:28:25.891996 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": container with ID starting with 31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89 not found: ID does not exist" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892047 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} err="failed to get container status \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": rpc error: code = NotFound desc = could not find container \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": container with ID starting with 31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892080 4779 scope.go:117] "RemoveContainer" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892492 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} err="failed to get container status \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": rpc error: code = NotFound desc = could not find container \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": container with ID starting with 936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892543 4779 scope.go:117] "RemoveContainer" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892942 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} err="failed to get container status \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": rpc error: code = NotFound desc = could not find container \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": container with ID starting with 
0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.892990 4779 scope.go:117] "RemoveContainer" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.893399 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} err="failed to get container status \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": rpc error: code = NotFound desc = could not find container \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": container with ID starting with 3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.893431 4779 scope.go:117] "RemoveContainer" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.893773 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} err="failed to get container status \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": rpc error: code = NotFound desc = could not find container \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": container with ID starting with 31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.893824 4779 scope.go:117] "RemoveContainer" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.894388 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} err="failed to get container status \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": rpc error: code = NotFound desc = could not find container \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": container with ID starting with 936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.894421 4779 scope.go:117] "RemoveContainer" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.894740 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} err="failed to get container status \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": rpc error: code = NotFound desc = could not find container \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": container with ID starting with 0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.894809 4779 scope.go:117] "RemoveContainer" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895158 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} err="failed to get container status \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": rpc error: code = NotFound desc = could not find container \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": container with ID starting with 3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895200 4779 scope.go:117] "RemoveContainer" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895585 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} err="failed to get container status \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": rpc error: code = NotFound desc = could not find container \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": container with ID starting with 31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895613 4779 scope.go:117] "RemoveContainer" containerID="936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895944 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f"} err="failed to get container status \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": rpc error: code = NotFound desc = could not find container \"936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f\": container with ID starting with 936013deef010775fc421a3b46f44c920cde7d367f7f0b511b8fb19206c8192f not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.895983 4779 scope.go:117] "RemoveContainer" containerID="0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.896312 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14"} err="failed to get container status \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": rpc error: code = NotFound desc = could not find container \"0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14\": container with ID starting with 0ab94c689ec3d4d4fd907cc168d20f42a962861ce4ea400e98d33908d59e0e14 not found: ID does not exist" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.896359 4779 scope.go:117] "RemoveContainer" containerID="3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.896686 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023"} err="failed to get container status \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": rpc error: code = NotFound desc = could not find container \"3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023\": container with ID starting with 3c2d35bddce92a8b9c9eff36f77354c7df98556a9a8a6ef29537a757aeaf4023 not found: ID does not exist" Sep 
29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.896737 4779 scope.go:117] "RemoveContainer" containerID="31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89" Sep 29 19:28:25 crc kubenswrapper[4779]: I0929 19:28:25.897077 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89"} err="failed to get container status \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": rpc error: code = NotFound desc = could not find container \"31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89\": container with ID starting with 31b2408c12909ab2b56be18b2d325eb22c5886593a8b4fab829bb77220715a89 not found: ID does not exist" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.025208 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.038459 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.057166 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:26 crc kubenswrapper[4779]: E0929 19:28:26.059897 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-notification-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.059946 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-notification-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: E0929 19:28:26.059967 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-central-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.059978 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-central-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: E0929 19:28:26.060014 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="proxy-httpd" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060024 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="proxy-httpd" Sep 29 19:28:26 crc kubenswrapper[4779]: E0929 19:28:26.060054 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="sg-core" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060062 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="sg-core" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060275 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-notification-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060293 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="sg-core" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060332 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="proxy-httpd" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.060358 4779 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" containerName="ceilometer-central-agent" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.062390 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.065652 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.068579 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.068675 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.077673 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149649 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj7q8\" (UniqueName: \"kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149750 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149785 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149812 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.149832 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.150009 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: 
I0929 19:28:26.150042 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253094 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253154 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253185 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253221 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj7q8\" (UniqueName: \"kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253313 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253380 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.253840 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc 
kubenswrapper[4779]: I0929 19:28:26.254219 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.256588 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.256695 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.257482 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.258433 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.266185 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.269196 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj7q8\" (UniqueName: \"kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8\") pod \"ceilometer-0\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.377768 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.614611 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:26 crc kubenswrapper[4779]: I0929 19:28:26.934799 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.303266 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.372907 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs\") pod \"b1327ac7-e940-435f-b344-f03e2c406800\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.372959 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle\") pod \"b1327ac7-e940-435f-b344-f03e2c406800\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.373115 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krnhq\" (UniqueName: \"kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq\") pod \"b1327ac7-e940-435f-b344-f03e2c406800\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.373163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data\") pod \"b1327ac7-e940-435f-b344-f03e2c406800\" (UID: \"b1327ac7-e940-435f-b344-f03e2c406800\") " Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.373560 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs" (OuterVolumeSpecName: "logs") pod "b1327ac7-e940-435f-b344-f03e2c406800" (UID: "b1327ac7-e940-435f-b344-f03e2c406800"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.373746 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1327ac7-e940-435f-b344-f03e2c406800-logs\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.390272 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq" (OuterVolumeSpecName: "kube-api-access-krnhq") pod "b1327ac7-e940-435f-b344-f03e2c406800" (UID: "b1327ac7-e940-435f-b344-f03e2c406800"). InnerVolumeSpecName "kube-api-access-krnhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.417530 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data" (OuterVolumeSpecName: "config-data") pod "b1327ac7-e940-435f-b344-f03e2c406800" (UID: "b1327ac7-e940-435f-b344-f03e2c406800"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.434514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1327ac7-e940-435f-b344-f03e2c406800" (UID: "b1327ac7-e940-435f-b344-f03e2c406800"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.475947 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.475987 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krnhq\" (UniqueName: \"kubernetes.io/projected/b1327ac7-e940-435f-b344-f03e2c406800-kube-api-access-krnhq\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.476003 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1327ac7-e940-435f-b344-f03e2c406800-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.733640 4779 generic.go:334] "Generic (PLEG): container finished" podID="b1327ac7-e940-435f-b344-f03e2c406800" containerID="a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b" exitCode=0 Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.733820 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.735423 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerDied","Data":"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b"} Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.735455 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b1327ac7-e940-435f-b344-f03e2c406800","Type":"ContainerDied","Data":"7e71dfdb50f23a30d8eaf745259e08c91b7be23a0fef2b53e824a915bc81497b"} Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.735472 4779 scope.go:117] "RemoveContainer" containerID="a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.736498 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerStarted","Data":"fc6cf3b29cc95a3683f7569def345855ca3222ebae2ca52fb284f3f931a6d0c0"} Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.780687 4779 scope.go:117] "RemoveContainer" containerID="1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.786663 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="838e3b9e-959b-4b60-ba2b-4a81f2449f60" path="/var/lib/kubelet/pods/838e3b9e-959b-4b60-ba2b-4a81f2449f60/volumes" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.787580 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.787610 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.796183 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:27 crc kubenswrapper[4779]: E0929 19:28:27.796617 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-log" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.796633 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-log" Sep 29 19:28:27 crc kubenswrapper[4779]: E0929 19:28:27.796668 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-api" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.796675 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-api" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.796832 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-api" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.796845 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1327ac7-e940-435f-b344-f03e2c406800" containerName="nova-api-log" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.797833 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.799964 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.800097 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.805380 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.806365 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.808114 4779 scope.go:117] "RemoveContainer" containerID="a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b" Sep 29 19:28:27 crc kubenswrapper[4779]: E0929 19:28:27.808526 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b\": container with ID starting with a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b not found: ID does not exist" containerID="a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.808557 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b"} err="failed to get container status \"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b\": rpc error: code = NotFound desc = could not find container \"a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b\": container with ID starting with a866ab138804053e3ed6b09ce2acbbb4e5f81198b6cd3ff405153f855c13d85b not found: ID does not exist" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.808580 4779 scope.go:117] "RemoveContainer" containerID="1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18" Sep 29 19:28:27 crc kubenswrapper[4779]: E0929 19:28:27.808907 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18\": container with ID starting with 1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18 not found: ID does not exist" 
containerID="1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.808923 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18"} err="failed to get container status \"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18\": rpc error: code = NotFound desc = could not find container \"1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18\": container with ID starting with 1398aa74149e59d441ba14ca6f41790588a126f093f60a05b30a0b0d73e57d18 not found: ID does not exist" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.882927 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9rzc\" (UniqueName: \"kubernetes.io/projected/5d230bec-c78b-45a3-b334-7353e1a8b827-kube-api-access-t9rzc\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.883024 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.883077 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.883100 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-config-data\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.883122 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d230bec-c78b-45a3-b334-7353e1a8b827-logs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.883202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-public-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985186 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985265 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985289 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-config-data\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985313 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d230bec-c78b-45a3-b334-7353e1a8b827-logs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-public-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.985500 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9rzc\" (UniqueName: \"kubernetes.io/projected/5d230bec-c78b-45a3-b334-7353e1a8b827-kube-api-access-t9rzc\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.986437 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d230bec-c78b-45a3-b334-7353e1a8b827-logs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.991294 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.992151 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-public-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:27 crc kubenswrapper[4779]: I0929 19:28:27.992897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.002964 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d230bec-c78b-45a3-b334-7353e1a8b827-config-data\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.003314 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9rzc\" (UniqueName: \"kubernetes.io/projected/5d230bec-c78b-45a3-b334-7353e1a8b827-kube-api-access-t9rzc\") pod \"nova-api-0\" (UID: \"5d230bec-c78b-45a3-b334-7353e1a8b827\") " pod="openstack/nova-api-0" Sep 29 19:28:28 crc 
kubenswrapper[4779]: I0929 19:28:28.115421 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.556740 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 19:28:28 crc kubenswrapper[4779]: W0929 19:28:28.560493 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d230bec_c78b_45a3_b334_7353e1a8b827.slice/crio-77f1a80ea93e649f5266de935e23ae03ab64237a826dbaef3381b8353c8e49d4 WatchSource:0}: Error finding container 77f1a80ea93e649f5266de935e23ae03ab64237a826dbaef3381b8353c8e49d4: Status 404 returned error can't find the container with id 77f1a80ea93e649f5266de935e23ae03ab64237a826dbaef3381b8353c8e49d4 Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.746705 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d230bec-c78b-45a3-b334-7353e1a8b827","Type":"ContainerStarted","Data":"2524c2d6cb6ec9e6cc703878444fcbeabc26902f5d78c0587958ae3fcfcaef1c"} Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.746943 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d230bec-c78b-45a3-b334-7353e1a8b827","Type":"ContainerStarted","Data":"77f1a80ea93e649f5266de935e23ae03ab64237a826dbaef3381b8353c8e49d4"} Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.750094 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerStarted","Data":"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0"} Sep 29 19:28:28 crc kubenswrapper[4779]: I0929 19:28:28.750128 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerStarted","Data":"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564"} Sep 29 19:28:29 crc kubenswrapper[4779]: I0929 19:28:29.758427 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5d230bec-c78b-45a3-b334-7353e1a8b827","Type":"ContainerStarted","Data":"8d731428ec5776bb1f092d864df7f42a40474736f89fdc8996ad7b72aa45ea4e"} Sep 29 19:28:29 crc kubenswrapper[4779]: I0929 19:28:29.787787 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1327ac7-e940-435f-b344-f03e2c406800" path="/var/lib/kubelet/pods/b1327ac7-e940-435f-b344-f03e2c406800/volumes" Sep 29 19:28:29 crc kubenswrapper[4779]: I0929 19:28:29.788353 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerStarted","Data":"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98"} Sep 29 19:28:29 crc kubenswrapper[4779]: I0929 19:28:29.800648 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.800630159 podStartE2EDuration="2.800630159s" podCreationTimestamp="2025-09-29 19:28:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:28:29.788425045 +0000 UTC m=+1220.672850145" watchObservedRunningTime="2025-09-29 19:28:29.800630159 +0000 UTC m=+1220.685055259" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.154547 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.242623 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.242909 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="dnsmasq-dns" containerID="cri-o://8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23" gracePeriod=10 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.662141 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783588 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvwpl\" (UniqueName: \"kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783711 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783747 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783841 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783911 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.783976 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0\") pod \"bac34d02-e097-4b82-afda-9b2d885c6fa4\" (UID: \"bac34d02-e097-4b82-afda-9b2d885c6fa4\") " Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.788888 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl" (OuterVolumeSpecName: "kube-api-access-tvwpl") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "kube-api-access-tvwpl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.812515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerStarted","Data":"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388"} Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.812990 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="proxy-httpd" containerID="cri-o://a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388" gracePeriod=30 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.812988 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-central-agent" containerID="cri-o://1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564" gracePeriod=30 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.813074 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="sg-core" containerID="cri-o://8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98" gracePeriod=30 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.813226 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-notification-agent" containerID="cri-o://a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0" gracePeriod=30 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.819996 4779 generic.go:334] "Generic (PLEG): container finished" podID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerID="8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23" exitCode=0 Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.820033 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" event={"ID":"bac34d02-e097-4b82-afda-9b2d885c6fa4","Type":"ContainerDied","Data":"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23"} Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.820062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" event={"ID":"bac34d02-e097-4b82-afda-9b2d885c6fa4","Type":"ContainerDied","Data":"c2da79f17a717c0bd4b240fed4a4d7e090f1aee670d813eb54d0e3b66d632f8c"} Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.820080 4779 scope.go:117] "RemoveContainer" containerID="8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.820223 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-5q9p4" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.850960 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.760548067 podStartE2EDuration="5.850941388s" podCreationTimestamp="2025-09-29 19:28:26 +0000 UTC" firstStartedPulling="2025-09-29 19:28:26.938281484 +0000 UTC m=+1217.822706604" lastFinishedPulling="2025-09-29 19:28:31.028674825 +0000 UTC m=+1221.913099925" observedRunningTime="2025-09-29 19:28:31.839935388 +0000 UTC m=+1222.724360488" watchObservedRunningTime="2025-09-29 19:28:31.850941388 +0000 UTC m=+1222.735366488" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.851803 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.861433 4779 scope.go:117] "RemoveContainer" containerID="acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.867113 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.875920 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.881310 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config" (OuterVolumeSpecName: "config") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.881452 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bac34d02-e097-4b82-afda-9b2d885c6fa4" (UID: "bac34d02-e097-4b82-afda-9b2d885c6fa4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886553 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886599 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886614 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvwpl\" (UniqueName: \"kubernetes.io/projected/bac34d02-e097-4b82-afda-9b2d885c6fa4-kube-api-access-tvwpl\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886628 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886640 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.886651 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bac34d02-e097-4b82-afda-9b2d885c6fa4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.887850 4779 scope.go:117] "RemoveContainer" containerID="8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23" Sep 29 19:28:31 crc kubenswrapper[4779]: E0929 19:28:31.888502 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23\": container with ID starting with 8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23 not found: ID does not exist" containerID="8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.888532 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23"} err="failed to get container status \"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23\": rpc error: code = NotFound desc = could not find container \"8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23\": container with ID starting with 8f77a64600ab6f18c1e7bc3108a6f83bc8926d553e2b6541c7f35f82ebf77a23 not found: ID does not exist" Sep 29 19:28:31 crc kubenswrapper[4779]: I0929 19:28:31.888551 4779 scope.go:117] "RemoveContainer" containerID="acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35" Sep 29 19:28:31 crc kubenswrapper[4779]: E0929 19:28:31.888800 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35\": container with ID starting with acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35 not found: ID does not exist" containerID="acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35" Sep 29 19:28:31 
crc kubenswrapper[4779]: I0929 19:28:31.888885 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35"} err="failed to get container status \"acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35\": rpc error: code = NotFound desc = could not find container \"acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35\": container with ID starting with acfdc443fa3fae973aab723aa20f246dacd2162251a5997d5b312a762757de35 not found: ID does not exist" Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.156008 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.165839 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-5q9p4"] Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832278 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerID="a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388" exitCode=0 Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832333 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerID="8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98" exitCode=2 Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832312 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerDied","Data":"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388"} Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerDied","Data":"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98"} Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerDied","Data":"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0"} Sep 29 19:28:32 crc kubenswrapper[4779]: I0929 19:28:32.832345 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerID="a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0" exitCode=0 Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.155125 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210500 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj7q8\" (UniqueName: \"kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210553 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210649 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210718 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210758 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210835 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210927 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.210969 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd\") pod \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\" (UID: \"4a704332-3e25-4a05-80e2-4eaea9b72aa4\") " Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.211649 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.211670 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.216256 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts" (OuterVolumeSpecName: "scripts") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.216502 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8" (OuterVolumeSpecName: "kube-api-access-gj7q8") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "kube-api-access-gj7q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.251992 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.281128 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.310311 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313441 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj7q8\" (UniqueName: \"kubernetes.io/projected/4a704332-3e25-4a05-80e2-4eaea9b72aa4-kube-api-access-gj7q8\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313471 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313480 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313488 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313496 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313505 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.313513 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a704332-3e25-4a05-80e2-4eaea9b72aa4-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.334142 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data" (OuterVolumeSpecName: "config-data") pod "4a704332-3e25-4a05-80e2-4eaea9b72aa4" (UID: "4a704332-3e25-4a05-80e2-4eaea9b72aa4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.415310 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a704332-3e25-4a05-80e2-4eaea9b72aa4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.789103 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" path="/var/lib/kubelet/pods/bac34d02-e097-4b82-afda-9b2d885c6fa4/volumes" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.851742 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerID="1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564" exitCode=0 Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.851825 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerDied","Data":"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564"} Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.851894 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a704332-3e25-4a05-80e2-4eaea9b72aa4","Type":"ContainerDied","Data":"fc6cf3b29cc95a3683f7569def345855ca3222ebae2ca52fb284f3f931a6d0c0"} Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.851939 4779 scope.go:117] "RemoveContainer" containerID="a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.851977 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.886694 4779 scope.go:117] "RemoveContainer" containerID="8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.902433 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.921944 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.932583 4779 scope.go:117] "RemoveContainer" containerID="a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943278 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943729 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-central-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943753 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-central-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943783 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="proxy-httpd" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943792 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="proxy-httpd" Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943811 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" 
containerName="sg-core" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943820 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="sg-core" Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943843 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="init" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943850 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="init" Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943867 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="dnsmasq-dns" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943875 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="dnsmasq-dns" Sep 29 19:28:33 crc kubenswrapper[4779]: E0929 19:28:33.943886 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-notification-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.943894 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-notification-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.944124 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-central-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.944142 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="ceilometer-notification-agent" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.944168 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="proxy-httpd" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.944183 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac34d02-e097-4b82-afda-9b2d885c6fa4" containerName="dnsmasq-dns" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.944196 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" containerName="sg-core" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.947114 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.950852 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.950960 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.951221 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.967687 4779 scope.go:117] "RemoveContainer" containerID="1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564" Sep 29 19:28:33 crc kubenswrapper[4779]: I0929 19:28:33.970445 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.018384 4779 scope.go:117] "RemoveContainer" containerID="a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388" Sep 29 19:28:34 crc kubenswrapper[4779]: E0929 19:28:34.021811 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388\": container with ID starting with a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388 not found: ID does not exist" containerID="a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.021963 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388"} err="failed to get container status \"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388\": rpc error: code = NotFound desc = could not find container \"a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388\": container with ID starting with a92e6905751a267a02f55215445832b87b8dc981c0fd433e605b24d614a72388 not found: ID does not exist" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.022113 4779 scope.go:117] "RemoveContainer" containerID="8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98" Sep 29 19:28:34 crc kubenswrapper[4779]: E0929 19:28:34.022979 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98\": container with ID starting with 8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98 not found: ID does not exist" containerID="8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.023040 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98"} err="failed to get container status \"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98\": rpc error: code = NotFound desc = could not find container \"8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98\": container with ID starting with 8f5fff182f0b5c91ef7a4724e798df98e5b0631a3fe5c198136d95cfda48fe98 not found: ID does not exist" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.023082 4779 scope.go:117] "RemoveContainer" containerID="a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0" Sep 29 19:28:34 
crc kubenswrapper[4779]: E0929 19:28:34.023512 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0\": container with ID starting with a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0 not found: ID does not exist" containerID="a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.023586 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0"} err="failed to get container status \"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0\": rpc error: code = NotFound desc = could not find container \"a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0\": container with ID starting with a03a572857870256704b1297341be6089c53e604b6074f93721688bcbc91f3e0 not found: ID does not exist" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.023610 4779 scope.go:117] "RemoveContainer" containerID="1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564" Sep 29 19:28:34 crc kubenswrapper[4779]: E0929 19:28:34.024148 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564\": container with ID starting with 1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564 not found: ID does not exist" containerID="1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.024189 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564"} err="failed to get container status \"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564\": rpc error: code = NotFound desc = could not find container \"1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564\": container with ID starting with 1283e86dab134a0e1e85d42bd331349844cd4518321b992e5a1a1e9bb57b9564 not found: ID does not exist" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029530 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029573 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-log-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029605 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-run-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029686 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
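[editor's note] The "ContainerStatus from runtime service failed ... NotFound" errors above are benign: the kubelet asks CRI-O to delete containers that were already garbage-collected, and a NotFound answer is treated as a successful, idempotent delete (logged at info level as "DeleteContainer returned error" and then dropped). A minimal sketch of that pattern, with a hypothetical runtimeRemove wrapper standing in for the CRI RemoveContainer RPC:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer is a toy stand-in for the kubelet's container-removal path;
// runtimeRemove is an assumed wrapper around a CRI RemoveContainer call.
func removeContainer(id string, runtimeRemove func(string) error) error {
	err := runtimeRemove(id)
	if err == nil {
		return nil
	}
	// NotFound means the container is already gone. Removal is idempotent,
	// so the error is logged and swallowed rather than surfaced as a failure.
	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
		fmt.Printf("container %q already removed: %v\n", id, err)
		return nil
	}
	return err
}

func main() {
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer("a92e6905", gone)) // prints the notice, then <nil>
}
```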
\"kube-api-access-nbrvk\" (UniqueName: \"kubernetes.io/projected/743a0275-ddbc-4917-adf2-0d268c8fe08b-kube-api-access-nbrvk\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029721 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029761 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-scripts\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029792 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-config-data\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.029886 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.131282 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbrvk\" (UniqueName: \"kubernetes.io/projected/743a0275-ddbc-4917-adf2-0d268c8fe08b-kube-api-access-nbrvk\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.132028 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.133006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-scripts\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.133061 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-config-data\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.133447 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 
19:28:34.133511 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.133541 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-log-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.133583 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-run-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.134089 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-log-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.134194 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/743a0275-ddbc-4917-adf2-0d268c8fe08b-run-httpd\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.136122 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.137873 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.139146 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-config-data\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.140592 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.141251 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743a0275-ddbc-4917-adf2-0d268c8fe08b-scripts\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.153175 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-nbrvk\" (UniqueName: \"kubernetes.io/projected/743a0275-ddbc-4917-adf2-0d268c8fe08b-kube-api-access-nbrvk\") pod \"ceilometer-0\" (UID: \"743a0275-ddbc-4917-adf2-0d268c8fe08b\") " pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.289145 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.753003 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 19:28:34 crc kubenswrapper[4779]: W0929 19:28:34.761056 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod743a0275_ddbc_4917_adf2_0d268c8fe08b.slice/crio-16889f639d1eaa6c24a48399ad21b3b3a13ef4384f09e443c2817266d4b3783c WatchSource:0}: Error finding container 16889f639d1eaa6c24a48399ad21b3b3a13ef4384f09e443c2817266d4b3783c: Status 404 returned error can't find the container with id 16889f639d1eaa6c24a48399ad21b3b3a13ef4384f09e443c2817266d4b3783c Sep 29 19:28:34 crc kubenswrapper[4779]: I0929 19:28:34.863734 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743a0275-ddbc-4917-adf2-0d268c8fe08b","Type":"ContainerStarted","Data":"16889f639d1eaa6c24a48399ad21b3b3a13ef4384f09e443c2817266d4b3783c"} Sep 29 19:28:35 crc kubenswrapper[4779]: I0929 19:28:35.786677 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a704332-3e25-4a05-80e2-4eaea9b72aa4" path="/var/lib/kubelet/pods/4a704332-3e25-4a05-80e2-4eaea9b72aa4/volumes" Sep 29 19:28:35 crc kubenswrapper[4779]: I0929 19:28:35.876353 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743a0275-ddbc-4917-adf2-0d268c8fe08b","Type":"ContainerStarted","Data":"dd12e3d05aa06232dd6fad9861b575cdd40a430d3aa8295cb0fc6d2995a508d9"} Sep 29 19:28:36 crc kubenswrapper[4779]: I0929 19:28:36.905145 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743a0275-ddbc-4917-adf2-0d268c8fe08b","Type":"ContainerStarted","Data":"9b556c24af7b8a754359b6662069ff9602de70c2fa938c6804853fc7bb27c071"} Sep 29 19:28:37 crc kubenswrapper[4779]: I0929 19:28:37.918757 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743a0275-ddbc-4917-adf2-0d268c8fe08b","Type":"ContainerStarted","Data":"b9f2a8458832ed27cb52b92cc4a607deaee32d236e2fbaff729d1ec94f6c54ac"} Sep 29 19:28:38 crc kubenswrapper[4779]: I0929 19:28:38.115983 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:28:38 crc kubenswrapper[4779]: I0929 19:28:38.116049 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 19:28:38 crc kubenswrapper[4779]: I0929 19:28:38.931980 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"743a0275-ddbc-4917-adf2-0d268c8fe08b","Type":"ContainerStarted","Data":"3884fb8a42db9f5bc8748b028b93e6aa58495ab41f1741b488f1e100e11d68ab"} Sep 29 19:28:38 crc kubenswrapper[4779]: I0929 19:28:38.933350 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 19:28:38 crc kubenswrapper[4779]: I0929 19:28:38.961863 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3206335080000002 
podStartE2EDuration="5.961836009s" podCreationTimestamp="2025-09-29 19:28:33 +0000 UTC" firstStartedPulling="2025-09-29 19:28:34.764525179 +0000 UTC m=+1225.648950289" lastFinishedPulling="2025-09-29 19:28:38.40572769 +0000 UTC m=+1229.290152790" observedRunningTime="2025-09-29 19:28:38.960749779 +0000 UTC m=+1229.845174889" watchObservedRunningTime="2025-09-29 19:28:38.961836009 +0000 UTC m=+1229.846261119" Sep 29 19:28:39 crc kubenswrapper[4779]: I0929 19:28:39.131468 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5d230bec-c78b-45a3-b334-7353e1a8b827" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 19:28:39 crc kubenswrapper[4779]: I0929 19:28:39.131517 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5d230bec-c78b-45a3-b334-7353e1a8b827" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 19:28:43 crc kubenswrapper[4779]: I0929 19:28:43.785274 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:28:43 crc kubenswrapper[4779]: I0929 19:28:43.786036 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.121989 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.122730 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.123499 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.123956 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.129571 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:28:48 crc kubenswrapper[4779]: I0929 19:28:48.130292 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 19:29:04 crc kubenswrapper[4779]: I0929 19:29:04.304359 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 19:29:13 crc kubenswrapper[4779]: I0929 19:29:13.784842 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:29:13 crc kubenswrapper[4779]: I0929 19:29:13.785485 4779 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:29:13 crc kubenswrapper[4779]: I0929 19:29:13.785545 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:29:13 crc kubenswrapper[4779]: I0929 19:29:13.786591 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:29:13 crc kubenswrapper[4779]: I0929 19:29:13.786694 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03" gracePeriod=600 Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.149617 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.365399 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03" exitCode=0 Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.365451 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03"} Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.365476 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6"} Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.365494 4779 scope.go:117] "RemoveContainer" containerID="0dd6acb2d9b3673c7f5bb54457583b38c1079b65f1949a29faac6a92347b5460" Sep 29 19:29:14 crc kubenswrapper[4779]: I0929 19:29:14.932805 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:18 crc kubenswrapper[4779]: I0929 19:29:18.657608 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="rabbitmq" containerID="cri-o://f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204" gracePeriod=604796 Sep 29 19:29:19 crc kubenswrapper[4779]: I0929 19:29:19.289630 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="rabbitmq" containerID="cri-o://769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5" gracePeriod=604796 Sep 29 19:29:21 crc kubenswrapper[4779]: I0929 19:29:21.981752 4779 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/rabbitmq-server-0" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Sep 29 19:29:22 crc kubenswrapper[4779]: I0929 19:29:22.275084 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.308139 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.408162 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.408371 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.408412 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.408433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409379 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpj5n\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409436 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409525 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409563 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409591 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.409628 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie\") pod \"523fd020-2e02-4807-93b8-82ecbd1152eb\" (UID: \"523fd020-2e02-4807-93b8-82ecbd1152eb\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.410388 4779 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.411417 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.415548 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.415903 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info" (OuterVolumeSpecName: "pod-info") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.417885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.418757 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.443612 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n" (OuterVolumeSpecName: "kube-api-access-mpj5n") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "kube-api-access-mpj5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.457724 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.475328 4779 generic.go:334] "Generic (PLEG): container finished" podID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerID="f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204" exitCode=0 Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.475377 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerDied","Data":"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204"} Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.475407 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"523fd020-2e02-4807-93b8-82ecbd1152eb","Type":"ContainerDied","Data":"0a6a66e89e42551aec18d9db4a532cea2744f4c2a4d82854fcb8ac64bbc284b7"} Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.475427 4779 scope.go:117] "RemoveContainer" containerID="f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.475580 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.496291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data" (OuterVolumeSpecName: "config-data") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512355 4779 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/523fd020-2e02-4807-93b8-82ecbd1152eb-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512384 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpj5n\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-kube-api-access-mpj5n\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512395 4779 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/523fd020-2e02-4807-93b8-82ecbd1152eb-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512403 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512430 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512439 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512448 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.512456 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.546066 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.548557 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf" (OuterVolumeSpecName: "server-conf") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.563472 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "523fd020-2e02-4807-93b8-82ecbd1152eb" (UID: "523fd020-2e02-4807-93b8-82ecbd1152eb"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.584824 4779 scope.go:117] "RemoveContainer" containerID="9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.607713 4779 scope.go:117] "RemoveContainer" containerID="f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204" Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.609735 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204\": container with ID starting with f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204 not found: ID does not exist" containerID="f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.609774 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204"} err="failed to get container status \"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204\": rpc error: code = NotFound desc = could not find container \"f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204\": container with ID starting with f34ea16198ddb29fae0653ed2ebdd84481e2bc0a905b30a3be2c039009b4e204 not found: ID does not exist" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.609801 4779 scope.go:117] "RemoveContainer" containerID="9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c" Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.610858 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c\": container with ID starting with 9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c not found: ID does not exist" containerID="9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.610879 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c"} err="failed to get container status \"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c\": rpc error: code = NotFound desc = could not find container \"9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c\": container with ID starting with 9776af2c324857cc31ce4e6ed8bdf415cc299e0676aa7f42511a5dd846f16c9c not found: ID does not exist" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.614361 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/523fd020-2e02-4807-93b8-82ecbd1152eb-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.614381 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.614391 4779 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/523fd020-2e02-4807-93b8-82ecbd1152eb-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.775693 4779 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.819782 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.819892 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.819943 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820002 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820043 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820109 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820149 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820189 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgfgg\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820218 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820254 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd\") pod 
\"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.820287 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls\") pod \"549de7a5-30db-464d-bd6b-a6dcca25691d\" (UID: \"549de7a5-30db-464d-bd6b-a6dcca25691d\") " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.823137 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.823310 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.823880 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.829390 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.834657 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info" (OuterVolumeSpecName: "pod-info") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.834695 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.847440 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.850476 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg" (OuterVolumeSpecName: "kube-api-access-hgfgg") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "kube-api-access-hgfgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.853615 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.884597 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data" (OuterVolumeSpecName: "config-data") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.906039 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.918229 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.918816 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.918837 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.918850 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.918861 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.918893 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="setup-container" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.918901 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="setup-container" Sep 29 19:29:25 crc kubenswrapper[4779]: E0929 19:29:25.918924 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="setup-container" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.918934 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="setup-container" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.919215 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.919245 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerName="rabbitmq" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.920743 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.923007 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.923162 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.923258 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.923504 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.923706 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6clwg" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925328 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925363 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925376 4779 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925388 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925399 4779 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/549de7a5-30db-464d-bd6b-a6dcca25691d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925412 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgfgg\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-kube-api-access-hgfgg\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925423 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925434 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925444 4779 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/549de7a5-30db-464d-bd6b-a6dcca25691d-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.925956 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.926719 4779 
Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.926719 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.929073 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.946552 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.952053 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf" (OuterVolumeSpecName: "server-conf") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 19:29:25 crc kubenswrapper[4779]: I0929 19:29:25.999334 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "549de7a5-30db-464d-bd6b-a6dcca25691d" (UID: "549de7a5-30db-464d-bd6b-a6dcca25691d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028257 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhrrm\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-kube-api-access-mhrrm\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028790 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028845 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028871 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-config-data\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028913 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028930 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cde20801-b6a5-444f-ad26-2b36244bb38d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.028949 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cde20801-b6a5-444f-ad26-2b36244bb38d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029023 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029054 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029094 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029116 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029208 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/549de7a5-30db-464d-bd6b-a6dcca25691d-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029219 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.029229 4779 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/549de7a5-30db-464d-bd6b-a6dcca25691d-server-conf\") on node \"crc\" DevicePath \"\""
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131222 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhrrm\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-kube-api-access-mhrrm\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131274 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131331 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-config-data\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131401 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cde20801-b6a5-444f-ad26-2b36244bb38d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131419 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131442 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cde20801-b6a5-444f-ad26-2b36244bb38d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131525 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131571 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131594 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0"
Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.131721 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\"
(UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.132049 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.132259 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.133114 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.133932 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-config-data\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.134363 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cde20801-b6a5-444f-ad26-2b36244bb38d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.139455 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.139710 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.140657 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cde20801-b6a5-444f-ad26-2b36244bb38d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.141955 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cde20801-b6a5-444f-ad26-2b36244bb38d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.154013 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mhrrm\" (UniqueName: \"kubernetes.io/projected/cde20801-b6a5-444f-ad26-2b36244bb38d-kube-api-access-mhrrm\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.164591 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"cde20801-b6a5-444f-ad26-2b36244bb38d\") " pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.391270 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.487156 4779 generic.go:334] "Generic (PLEG): container finished" podID="549de7a5-30db-464d-bd6b-a6dcca25691d" containerID="769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5" exitCode=0 Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.487213 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.487210 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerDied","Data":"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5"} Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.487355 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"549de7a5-30db-464d-bd6b-a6dcca25691d","Type":"ContainerDied","Data":"ef04572ec2f4ad5b945e0078c290d645fa64f8c318b652de8427d8752806aa99"} Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.487383 4779 scope.go:117] "RemoveContainer" containerID="769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.524368 4779 scope.go:117] "RemoveContainer" containerID="debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.536406 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.558471 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.568591 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.573264 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.580366 4779 scope.go:117] "RemoveContainer" containerID="769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5" Sep 29 19:29:26 crc kubenswrapper[4779]: E0929 19:29:26.591590 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5\": container with ID starting with 769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5 not found: ID does not exist" containerID="769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.591630 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5"} err="failed to get container status \"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5\": rpc error: code = NotFound desc = could not find container \"769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5\": container with ID starting with 769ca8e086cd076049a9814691cb9da9fc29915023965294dedbf3186a6b3bb5 not found: ID does not exist" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.591656 4779 scope.go:117] "RemoveContainer" containerID="debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592055 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592187 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 19:29:26 crc kubenswrapper[4779]: E0929 19:29:26.592262 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43\": container with ID starting with debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43 not found: ID does not exist" containerID="debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592279 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43"} err="failed to get container status \"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43\": rpc error: code = NotFound desc = could not find container \"debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43\": container with ID starting with debe7210bbb9acdaf59c8ce863e712cd0130f5fa1480136f51dd85b3f8966c43 not found: ID does not exist" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592283 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-44wqk" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592370 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592447 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592452 4779 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.592711 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.595724 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657290 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657340 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657364 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657401 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657423 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkgdw\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-kube-api-access-zkgdw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657443 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657459 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0f654c2-f8a3-4049-a18c-75f12edc65ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657488 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.657510 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0f654c2-f8a3-4049-a18c-75f12edc65ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759230 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759309 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759366 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759385 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759427 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759452 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkgdw\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-kube-api-access-zkgdw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 
19:29:26.759471 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759514 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0f654c2-f8a3-4049-a18c-75f12edc65ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759544 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0f654c2-f8a3-4049-a18c-75f12edc65ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.759657 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.760858 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.761409 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.761431 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.766559 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0f654c2-f8a3-4049-a18c-75f12edc65ca-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.766662 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.769282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.769653 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.770304 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0f654c2-f8a3-4049-a18c-75f12edc65ca-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.781758 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0f654c2-f8a3-4049-a18c-75f12edc65ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.792366 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0f654c2-f8a3-4049-a18c-75f12edc65ca-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.795586 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkgdw\" (UniqueName: \"kubernetes.io/projected/e0f654c2-f8a3-4049-a18c-75f12edc65ca-kube-api-access-zkgdw\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.807358 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0f654c2-f8a3-4049-a18c-75f12edc65ca\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.916763 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:26 crc kubenswrapper[4779]: I0929 19:29:26.932685 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.413373 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.496832 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cde20801-b6a5-444f-ad26-2b36244bb38d","Type":"ContainerStarted","Data":"01eb00c878841d80791ec9951bc143fdf1d5a6b750a8d54ab2ff6ee17d07c624"} Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.496888 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cde20801-b6a5-444f-ad26-2b36244bb38d","Type":"ContainerStarted","Data":"84e873eb8cc779db5eb6c973664dc526984b5dc9718202bed69f8ce47990840d"} Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.498017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0f654c2-f8a3-4049-a18c-75f12edc65ca","Type":"ContainerStarted","Data":"e426e8af6c3871f40e314483d453df32c5a136cc98a1246d0949274f543b64ab"} Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.780110 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="523fd020-2e02-4807-93b8-82ecbd1152eb" path="/var/lib/kubelet/pods/523fd020-2e02-4807-93b8-82ecbd1152eb/volumes" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.781807 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="549de7a5-30db-464d-bd6b-a6dcca25691d" path="/var/lib/kubelet/pods/549de7a5-30db-464d-bd6b-a6dcca25691d/volumes" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.790771 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.792238 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.795472 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.808764 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.880534 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.880830 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.880855 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.880924 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.880954 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.881005 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scxq8\" (UniqueName: \"kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.881055 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.982593 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" 
(UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.982954 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.983073 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.983213 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.983406 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.983540 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.983635 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scxq8\" (UniqueName: \"kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.984688 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.984681 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.984762 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " 
pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.984776 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.985075 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:27 crc kubenswrapper[4779]: I0929 19:29:27.985639 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:28 crc kubenswrapper[4779]: I0929 19:29:28.007448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scxq8\" (UniqueName: \"kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8\") pod \"dnsmasq-dns-67b789f86c-pbkzj\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:28 crc kubenswrapper[4779]: I0929 19:29:28.113972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:28 crc kubenswrapper[4779]: I0929 19:29:28.507312 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0f654c2-f8a3-4049-a18c-75f12edc65ca","Type":"ContainerStarted","Data":"88f08604e8961c9925a92c07398b8c3e4864b53558b58a7cd588e37b24b3b959"} Sep 29 19:29:28 crc kubenswrapper[4779]: I0929 19:29:28.562657 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:29 crc kubenswrapper[4779]: I0929 19:29:29.520167 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerID="cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360" exitCode=0 Sep 29 19:29:29 crc kubenswrapper[4779]: I0929 19:29:29.520288 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" event={"ID":"5e7c7425-85ad-492f-8e9a-6172f8e383eb","Type":"ContainerDied","Data":"cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360"} Sep 29 19:29:29 crc kubenswrapper[4779]: I0929 19:29:29.521108 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" event={"ID":"5e7c7425-85ad-492f-8e9a-6172f8e383eb","Type":"ContainerStarted","Data":"5dcf0ab615e56b803f683cd514ca418f78f3fe98083421904e02efb4c29b3a6e"} Sep 29 19:29:30 crc kubenswrapper[4779]: I0929 19:29:30.537583 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" event={"ID":"5e7c7425-85ad-492f-8e9a-6172f8e383eb","Type":"ContainerStarted","Data":"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78"} Sep 29 19:29:30 crc kubenswrapper[4779]: I0929 19:29:30.538072 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:30 crc kubenswrapper[4779]: I0929 19:29:30.571441 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" podStartSLOduration=3.571410271 podStartE2EDuration="3.571410271s" podCreationTimestamp="2025-09-29 19:29:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:29:30.564105302 +0000 UTC m=+1281.448530412" watchObservedRunningTime="2025-09-29 19:29:30.571410271 +0000 UTC m=+1281.455835471" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.115459 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.172492 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.172736 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="dnsmasq-dns" containerID="cri-o://9b603cf1ac3e2424b11fe2c76a37312176ae2d390cb29bb7cdebde69ea689d1f" gracePeriod=10 Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.504488 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-x2rzr"] Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.506425 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.514418 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-x2rzr"] Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579180 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579251 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579302 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579331 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj8d8\" (UniqueName: \"kubernetes.io/projected/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-kube-api-access-xj8d8\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579430 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-config\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579466 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.579641 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.640968 4779 generic.go:334] "Generic (PLEG): container finished" podID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerID="9b603cf1ac3e2424b11fe2c76a37312176ae2d390cb29bb7cdebde69ea689d1f" exitCode=0 Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.641010 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" event={"ID":"ac6039d1-710e-4a9b-b96b-c20dd2c9937f","Type":"ContainerDied","Data":"9b603cf1ac3e2424b11fe2c76a37312176ae2d390cb29bb7cdebde69ea689d1f"} Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.681737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.681793 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj8d8\" (UniqueName: \"kubernetes.io/projected/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-kube-api-access-xj8d8\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.681870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-config\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.681903 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.682005 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: 
\"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.682070 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.682105 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.683263 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-config\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.683342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.686136 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.686177 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.686212 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.686794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.713089 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj8d8\" (UniqueName: \"kubernetes.io/projected/5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4-kube-api-access-xj8d8\") pod \"dnsmasq-dns-cb6ffcf87-x2rzr\" (UID: \"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4\") " pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc 
kubenswrapper[4779]: I0929 19:29:38.804154 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.830958 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887104 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887225 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887264 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887292 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887402 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv5cx\" (UniqueName: \"kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.887433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0\") pod \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\" (UID: \"ac6039d1-710e-4a9b-b96b-c20dd2c9937f\") " Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.891748 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx" (OuterVolumeSpecName: "kube-api-access-rv5cx") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "kube-api-access-rv5cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.938252 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.943778 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config" (OuterVolumeSpecName: "config") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.962078 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.969017 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.975300 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ac6039d1-710e-4a9b-b96b-c20dd2c9937f" (UID: "ac6039d1-710e-4a9b-b96b-c20dd2c9937f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990077 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990112 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990125 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990135 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990145 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:38 crc kubenswrapper[4779]: I0929 19:29:38.990156 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv5cx\" (UniqueName: \"kubernetes.io/projected/ac6039d1-710e-4a9b-b96b-c20dd2c9937f-kube-api-access-rv5cx\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.293669 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-cb6ffcf87-x2rzr"] Sep 29 19:29:39 crc kubenswrapper[4779]: W0929 19:29:39.300210 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e41bca4_c1c5_4a1f_b5a2_4f1d2af086a4.slice/crio-96a23362b25b0dc66bc91237dfb5dfa876ea5af0a60887a44d7f60e3db723fa1 WatchSource:0}: Error finding container 96a23362b25b0dc66bc91237dfb5dfa876ea5af0a60887a44d7f60e3db723fa1: Status 404 returned error can't find the container with id 96a23362b25b0dc66bc91237dfb5dfa876ea5af0a60887a44d7f60e3db723fa1 Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.655645 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" event={"ID":"ac6039d1-710e-4a9b-b96b-c20dd2c9937f","Type":"ContainerDied","Data":"213ee505c47ceefc698503d5728b588fa9d66b82fdcd56e525a48be7439c9a39"} Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.655919 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mzp42" Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.655951 4779 scope.go:117] "RemoveContainer" containerID="9b603cf1ac3e2424b11fe2c76a37312176ae2d390cb29bb7cdebde69ea689d1f" Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.659903 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4" containerID="b2286e2f30374358ef0ac3f6ff01c99b0c5bbf6447f42100c448d45b99652361" exitCode=0 Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.659938 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" event={"ID":"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4","Type":"ContainerDied","Data":"b2286e2f30374358ef0ac3f6ff01c99b0c5bbf6447f42100c448d45b99652361"} Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.659961 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" event={"ID":"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4","Type":"ContainerStarted","Data":"96a23362b25b0dc66bc91237dfb5dfa876ea5af0a60887a44d7f60e3db723fa1"} Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.694880 4779 scope.go:117] "RemoveContainer" containerID="0a15989b4f9b84fe7d394d4b3fd8d9d39c5f8f3e95fea9b6a9dd178d5b7a90eb" Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.812379 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:29:39 crc kubenswrapper[4779]: I0929 19:29:39.819125 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mzp42"] Sep 29 19:29:40 crc kubenswrapper[4779]: I0929 19:29:40.673027 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" event={"ID":"5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4","Type":"ContainerStarted","Data":"c453f28992e0c37e070ffefb1eec4a450e8048c15ad36c7a1bf919724262aa66"} Sep 29 19:29:40 crc kubenswrapper[4779]: I0929 19:29:40.673260 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:40 crc kubenswrapper[4779]: I0929 19:29:40.713190 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" podStartSLOduration=2.71316214 podStartE2EDuration="2.71316214s" podCreationTimestamp="2025-09-29 19:29:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 19:29:40.696287319 +0000 UTC m=+1291.580712419" watchObservedRunningTime="2025-09-29 19:29:40.71316214 +0000 UTC m=+1291.597587270" Sep 29 19:29:41 crc kubenswrapper[4779]: I0929 19:29:41.778808 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" path="/var/lib/kubelet/pods/ac6039d1-710e-4a9b-b96b-c20dd2c9937f/volumes" Sep 29 19:29:48 crc kubenswrapper[4779]: I0929 19:29:48.833008 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-x2rzr" Sep 29 19:29:48 crc kubenswrapper[4779]: I0929 19:29:48.917961 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:48 crc kubenswrapper[4779]: I0929 19:29:48.918189 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="dnsmasq-dns" containerID="cri-o://56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78" gracePeriod=10 Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.370861 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500634 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500846 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scxq8\" (UniqueName: \"kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500879 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500901 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500929 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.500961 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb\") pod \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\" (UID: \"5e7c7425-85ad-492f-8e9a-6172f8e383eb\") " Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.509918 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8" (OuterVolumeSpecName: "kube-api-access-scxq8") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "kube-api-access-scxq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.546822 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.554821 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config" (OuterVolumeSpecName: "config") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.570084 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.571370 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.572912 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.574488 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e7c7425-85ad-492f-8e9a-6172f8e383eb" (UID: "5e7c7425-85ad-492f-8e9a-6172f8e383eb"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602858 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scxq8\" (UniqueName: \"kubernetes.io/projected/5e7c7425-85ad-492f-8e9a-6172f8e383eb-kube-api-access-scxq8\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602885 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602894 4779 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602904 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602914 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602924 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-config\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.602932 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e7c7425-85ad-492f-8e9a-6172f8e383eb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.776947 4779 generic.go:334] "Generic (PLEG): container finished" podID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerID="56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78" exitCode=0 Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.777047 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.783530 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" event={"ID":"5e7c7425-85ad-492f-8e9a-6172f8e383eb","Type":"ContainerDied","Data":"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78"} Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.783728 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-pbkzj" event={"ID":"5e7c7425-85ad-492f-8e9a-6172f8e383eb","Type":"ContainerDied","Data":"5dcf0ab615e56b803f683cd514ca418f78f3fe98083421904e02efb4c29b3a6e"} Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.783804 4779 scope.go:117] "RemoveContainer" containerID="56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.811651 4779 scope.go:117] "RemoveContainer" containerID="cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.842358 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.844041 4779 scope.go:117] "RemoveContainer" containerID="56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78" Sep 29 19:29:49 crc kubenswrapper[4779]: E0929 19:29:49.844658 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78\": container with ID starting with 56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78 not found: ID does not exist" containerID="56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.844709 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78"} err="failed to get container status \"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78\": rpc error: code = NotFound desc = could not find container \"56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78\": container with ID starting with 56826cbabdefcc09b4a9adb4df1637b1e7a5a31b73ad2da3437801f94f5c8e78 not found: ID does not exist" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.844745 4779 scope.go:117] "RemoveContainer" containerID="cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360" Sep 29 19:29:49 crc kubenswrapper[4779]: E0929 19:29:49.845155 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360\": container with ID starting with cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360 not found: ID does not exist" containerID="cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.845191 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360"} err="failed to get container status \"cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360\": rpc error: code = NotFound desc = could not find container 
\"cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360\": container with ID starting with cf2d8e991aacd485920170bf0276eee1bfa705b39aad9860a48b4a8e08a09360 not found: ID does not exist" Sep 29 19:29:49 crc kubenswrapper[4779]: I0929 19:29:49.850093 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-pbkzj"] Sep 29 19:29:51 crc kubenswrapper[4779]: I0929 19:29:51.793074 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" path="/var/lib/kubelet/pods/5e7c7425-85ad-492f-8e9a-6172f8e383eb/volumes" Sep 29 19:29:57 crc kubenswrapper[4779]: I0929 19:29:57.867751 4779 generic.go:334] "Generic (PLEG): container finished" podID="cde20801-b6a5-444f-ad26-2b36244bb38d" containerID="01eb00c878841d80791ec9951bc143fdf1d5a6b750a8d54ab2ff6ee17d07c624" exitCode=0 Sep 29 19:29:57 crc kubenswrapper[4779]: I0929 19:29:57.867879 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cde20801-b6a5-444f-ad26-2b36244bb38d","Type":"ContainerDied","Data":"01eb00c878841d80791ec9951bc143fdf1d5a6b750a8d54ab2ff6ee17d07c624"} Sep 29 19:29:58 crc kubenswrapper[4779]: I0929 19:29:58.879197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cde20801-b6a5-444f-ad26-2b36244bb38d","Type":"ContainerStarted","Data":"88202305eb2e27d1b9a2fb500a11d27b91f1f7a9a6e74763fc82dd7ddc3da286"} Sep 29 19:29:58 crc kubenswrapper[4779]: I0929 19:29:58.879759 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 19:29:58 crc kubenswrapper[4779]: I0929 19:29:58.883154 4779 generic.go:334] "Generic (PLEG): container finished" podID="e0f654c2-f8a3-4049-a18c-75f12edc65ca" containerID="88f08604e8961c9925a92c07398b8c3e4864b53558b58a7cd588e37b24b3b959" exitCode=0 Sep 29 19:29:58 crc kubenswrapper[4779]: I0929 19:29:58.883197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0f654c2-f8a3-4049-a18c-75f12edc65ca","Type":"ContainerDied","Data":"88f08604e8961c9925a92c07398b8c3e4864b53558b58a7cd588e37b24b3b959"} Sep 29 19:29:58 crc kubenswrapper[4779]: I0929 19:29:58.973233 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.973212891 podStartE2EDuration="33.973212891s" podCreationTimestamp="2025-09-29 19:29:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:29:58.913793577 +0000 UTC m=+1309.798218717" watchObservedRunningTime="2025-09-29 19:29:58.973212891 +0000 UTC m=+1309.857638001" Sep 29 19:29:59 crc kubenswrapper[4779]: I0929 19:29:59.895524 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0f654c2-f8a3-4049-a18c-75f12edc65ca","Type":"ContainerStarted","Data":"09e268d65c531a2c2d81d528911a134041faad54b93698ceac5e0bafd01f0adf"} Sep 29 19:29:59 crc kubenswrapper[4779]: I0929 19:29:59.896134 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:29:59 crc kubenswrapper[4779]: I0929 19:29:59.931738 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=33.931718781 podStartE2EDuration="33.931718781s" podCreationTimestamp="2025-09-29 19:29:26 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 19:29:59.930923719 +0000 UTC m=+1310.815348829" watchObservedRunningTime="2025-09-29 19:29:59.931718781 +0000 UTC m=+1310.816143881" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.140880 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd"] Sep 29 19:30:00 crc kubenswrapper[4779]: E0929 19:30:00.141720 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="init" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.141822 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="init" Sep 29 19:30:00 crc kubenswrapper[4779]: E0929 19:30:00.141898 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.141957 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: E0929 19:30:00.142041 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="init" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.142116 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="init" Sep 29 19:30:00 crc kubenswrapper[4779]: E0929 19:30:00.142180 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.142240 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.142564 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac6039d1-710e-4a9b-b96b-c20dd2c9937f" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.142665 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e7c7425-85ad-492f-8e9a-6172f8e383eb" containerName="dnsmasq-dns" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.143529 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.146373 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.146882 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.159934 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd"] Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.332433 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.332517 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.332547 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h8bq\" (UniqueName: \"kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.433607 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.433887 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h8bq\" (UniqueName: \"kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.433997 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.434582 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume\") pod 
\"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.442955 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.492094 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h8bq\" (UniqueName: \"kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq\") pod \"collect-profiles-29319570-j2rbd\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:00 crc kubenswrapper[4779]: I0929 19:30:00.761308 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:01 crc kubenswrapper[4779]: W0929 19:30:01.219017 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb668ad5e_5ccd_48ef_9a1d_a42bd4b7f3ab.slice/crio-6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab WatchSource:0}: Error finding container 6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab: Status 404 returned error can't find the container with id 6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.223038 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd"] Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.317726 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8"] Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.319354 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.321846 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.323902 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.323909 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.324221 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.338517 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8"] Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.453919 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.453965 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.454142 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l4k9\" (UniqueName: \"kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.454457 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.556566 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l4k9\" (UniqueName: \"kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.556757 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.556869 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.556912 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.600489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.600499 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.600605 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l4k9\" (UniqueName: \"kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.600839 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.655071 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.916637 4779 generic.go:334] "Generic (PLEG): container finished" podID="b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" containerID="18befd3a77b3f05971a2e5c5eb1a3a99661e525fefd3a97f46e0422281ca1307" exitCode=0 Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.916742 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" event={"ID":"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab","Type":"ContainerDied","Data":"18befd3a77b3f05971a2e5c5eb1a3a99661e525fefd3a97f46e0422281ca1307"} Sep 29 19:30:01 crc kubenswrapper[4779]: I0929 19:30:01.917043 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" event={"ID":"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab","Type":"ContainerStarted","Data":"6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab"} Sep 29 19:30:02 crc kubenswrapper[4779]: I0929 19:30:02.240292 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8"] Sep 29 19:30:02 crc kubenswrapper[4779]: W0929 19:30:02.245253 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76990f22_5c56_482e_a5a6_6d3c74bba7cd.slice/crio-b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca WatchSource:0}: Error finding container b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca: Status 404 returned error can't find the container with id b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca Sep 29 19:30:02 crc kubenswrapper[4779]: I0929 19:30:02.928980 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" event={"ID":"76990f22-5c56-482e-a5a6-6d3c74bba7cd","Type":"ContainerStarted","Data":"b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca"} Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.276978 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.392784 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume\") pod \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.392885 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume\") pod \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.392964 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8h8bq\" (UniqueName: \"kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq\") pod \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\" (UID: \"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab\") " Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.393680 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume" (OuterVolumeSpecName: "config-volume") pod "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" (UID: "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.400211 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" (UID: "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.404000 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq" (OuterVolumeSpecName: "kube-api-access-8h8bq") pod "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" (UID: "b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab"). InnerVolumeSpecName "kube-api-access-8h8bq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.494923 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.494963 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8h8bq\" (UniqueName: \"kubernetes.io/projected/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-kube-api-access-8h8bq\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.494973 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.943642 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" event={"ID":"b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab","Type":"ContainerDied","Data":"6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab"} Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.943698 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cae09c4fd3babae4a83aa07bc6b071f1900a6fa52259cc67c9c2efddc42b4ab" Sep 29 19:30:03 crc kubenswrapper[4779]: I0929 19:30:03.943730 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd" Sep 29 19:30:11 crc kubenswrapper[4779]: I0929 19:30:11.921788 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:13 crc kubenswrapper[4779]: I0929 19:30:13.032073 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" event={"ID":"76990f22-5c56-482e-a5a6-6d3c74bba7cd","Type":"ContainerStarted","Data":"1b49320d42cf4f5c4330b8a551582e5ddc5fb09cda6dce4c7d4eb019ff49db67"} Sep 29 19:30:13 crc kubenswrapper[4779]: I0929 19:30:13.054286 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" podStartSLOduration=2.38335855 podStartE2EDuration="12.054260868s" podCreationTimestamp="2025-09-29 19:30:01 +0000 UTC" firstStartedPulling="2025-09-29 19:30:02.24854608 +0000 UTC m=+1313.132971200" lastFinishedPulling="2025-09-29 19:30:11.919448428 +0000 UTC m=+1322.803873518" observedRunningTime="2025-09-29 19:30:13.048220833 +0000 UTC m=+1323.932645963" watchObservedRunningTime="2025-09-29 19:30:13.054260868 +0000 UTC m=+1323.938685968" Sep 29 19:30:16 crc kubenswrapper[4779]: I0929 19:30:16.396637 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 19:30:16 crc kubenswrapper[4779]: I0929 19:30:16.920589 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 19:30:24 crc kubenswrapper[4779]: I0929 19:30:24.146348 4779 generic.go:334] "Generic (PLEG): container finished" podID="76990f22-5c56-482e-a5a6-6d3c74bba7cd" containerID="1b49320d42cf4f5c4330b8a551582e5ddc5fb09cda6dce4c7d4eb019ff49db67" exitCode=0 Sep 29 19:30:24 crc kubenswrapper[4779]: I0929 19:30:24.146426 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" event={"ID":"76990f22-5c56-482e-a5a6-6d3c74bba7cd","Type":"ContainerDied","Data":"1b49320d42cf4f5c4330b8a551582e5ddc5fb09cda6dce4c7d4eb019ff49db67"} Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.664714 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.779103 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle\") pod \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.779222 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l4k9\" (UniqueName: \"kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9\") pod \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.779291 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory\") pod \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.779407 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key\") pod \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\" (UID: \"76990f22-5c56-482e-a5a6-6d3c74bba7cd\") " Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.784688 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "76990f22-5c56-482e-a5a6-6d3c74bba7cd" (UID: "76990f22-5c56-482e-a5a6-6d3c74bba7cd"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.785454 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9" (OuterVolumeSpecName: "kube-api-access-4l4k9") pod "76990f22-5c56-482e-a5a6-6d3c74bba7cd" (UID: "76990f22-5c56-482e-a5a6-6d3c74bba7cd"). InnerVolumeSpecName "kube-api-access-4l4k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.817291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory" (OuterVolumeSpecName: "inventory") pod "76990f22-5c56-482e-a5a6-6d3c74bba7cd" (UID: "76990f22-5c56-482e-a5a6-6d3c74bba7cd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.825139 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76990f22-5c56-482e-a5a6-6d3c74bba7cd" (UID: "76990f22-5c56-482e-a5a6-6d3c74bba7cd"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.882867 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l4k9\" (UniqueName: \"kubernetes.io/projected/76990f22-5c56-482e-a5a6-6d3c74bba7cd-kube-api-access-4l4k9\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.882911 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.882928 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:25 crc kubenswrapper[4779]: I0929 19:30:25.882945 4779 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76990f22-5c56-482e-a5a6-6d3c74bba7cd-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.174660 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" event={"ID":"76990f22-5c56-482e-a5a6-6d3c74bba7cd","Type":"ContainerDied","Data":"b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca"} Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.175118 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9cabd95befee2a3a96203f2d3a3e045900a9a8f18678ad01391fb008db39fca" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.174732 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.277091 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n"] Sep 29 19:30:26 crc kubenswrapper[4779]: E0929 19:30:26.277743 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" containerName="collect-profiles" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.277770 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" containerName="collect-profiles" Sep 29 19:30:26 crc kubenswrapper[4779]: E0929 19:30:26.277795 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76990f22-5c56-482e-a5a6-6d3c74bba7cd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.277809 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="76990f22-5c56-482e-a5a6-6d3c74bba7cd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.279268 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="76990f22-5c56-482e-a5a6-6d3c74bba7cd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.279349 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" containerName="collect-profiles" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.280353 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.282814 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.283222 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.284423 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.285400 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.295102 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n"] Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.394949 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.394998 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nr62\" (UniqueName: \"kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.395210 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.496947 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.496999 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nr62\" (UniqueName: \"kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.497075 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.503229 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.504062 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.513282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nr62\" (UniqueName: \"kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dht8n\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:26 crc kubenswrapper[4779]: I0929 19:30:26.619314 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:27 crc kubenswrapper[4779]: W0929 19:30:27.216610 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod630eccd4_a2c9_4003_b315_2d8d18ebeeba.slice/crio-e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00 WatchSource:0}: Error finding container e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00: Status 404 returned error can't find the container with id e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00 Sep 29 19:30:27 crc kubenswrapper[4779]: I0929 19:30:27.220932 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n"] Sep 29 19:30:28 crc kubenswrapper[4779]: I0929 19:30:28.201028 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" event={"ID":"630eccd4-a2c9-4003-b315-2d8d18ebeeba","Type":"ContainerStarted","Data":"54cc2dfda1ed3f33bf7036dcbade27e52bfcd67b87d40e93c8590da7fb286c31"} Sep 29 19:30:28 crc kubenswrapper[4779]: I0929 19:30:28.201776 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" event={"ID":"630eccd4-a2c9-4003-b315-2d8d18ebeeba","Type":"ContainerStarted","Data":"e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00"} Sep 29 19:30:28 crc kubenswrapper[4779]: I0929 19:30:28.231155 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" podStartSLOduration=1.7695814159999999 podStartE2EDuration="2.231135692s" podCreationTimestamp="2025-09-29 19:30:26 +0000 UTC" firstStartedPulling="2025-09-29 19:30:27.220854078 +0000 UTC m=+1338.105279178" lastFinishedPulling="2025-09-29 19:30:27.682408344 +0000 UTC m=+1338.566833454" observedRunningTime="2025-09-29 19:30:28.22556075 +0000 UTC m=+1339.109985860" watchObservedRunningTime="2025-09-29 19:30:28.231135692 +0000 UTC 
m=+1339.115560792" Sep 29 19:30:31 crc kubenswrapper[4779]: I0929 19:30:31.241295 4779 generic.go:334] "Generic (PLEG): container finished" podID="630eccd4-a2c9-4003-b315-2d8d18ebeeba" containerID="54cc2dfda1ed3f33bf7036dcbade27e52bfcd67b87d40e93c8590da7fb286c31" exitCode=0 Sep 29 19:30:31 crc kubenswrapper[4779]: I0929 19:30:31.241404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" event={"ID":"630eccd4-a2c9-4003-b315-2d8d18ebeeba","Type":"ContainerDied","Data":"54cc2dfda1ed3f33bf7036dcbade27e52bfcd67b87d40e93c8590da7fb286c31"} Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.744497 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.820231 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nr62\" (UniqueName: \"kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62\") pod \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.820617 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key\") pod \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.820752 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory\") pod \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\" (UID: \"630eccd4-a2c9-4003-b315-2d8d18ebeeba\") " Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.826667 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62" (OuterVolumeSpecName: "kube-api-access-5nr62") pod "630eccd4-a2c9-4003-b315-2d8d18ebeeba" (UID: "630eccd4-a2c9-4003-b315-2d8d18ebeeba"). InnerVolumeSpecName "kube-api-access-5nr62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.851174 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "630eccd4-a2c9-4003-b315-2d8d18ebeeba" (UID: "630eccd4-a2c9-4003-b315-2d8d18ebeeba"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.856639 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory" (OuterVolumeSpecName: "inventory") pod "630eccd4-a2c9-4003-b315-2d8d18ebeeba" (UID: "630eccd4-a2c9-4003-b315-2d8d18ebeeba"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.923426 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nr62\" (UniqueName: \"kubernetes.io/projected/630eccd4-a2c9-4003-b315-2d8d18ebeeba-kube-api-access-5nr62\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.923463 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:32 crc kubenswrapper[4779]: I0929 19:30:32.923474 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/630eccd4-a2c9-4003-b315-2d8d18ebeeba-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.266032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" event={"ID":"630eccd4-a2c9-4003-b315-2d8d18ebeeba","Type":"ContainerDied","Data":"e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00"} Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.266097 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e77a8de9137b3cda99ad065836ef32744fb06111bb6b1d9e1dd5a519bc504e00" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.266178 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dht8n" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.351081 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs"] Sep 29 19:30:33 crc kubenswrapper[4779]: E0929 19:30:33.351487 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="630eccd4-a2c9-4003-b315-2d8d18ebeeba" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.351505 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="630eccd4-a2c9-4003-b315-2d8d18ebeeba" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.351696 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="630eccd4-a2c9-4003-b315-2d8d18ebeeba" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.352238 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.354918 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.355090 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.355206 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.355837 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.366227 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs"] Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.432827 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.432878 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.433004 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.433124 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdqbh\" (UniqueName: \"kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.535158 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.535449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdqbh\" (UniqueName: \"kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.535522 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.535608 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.542644 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.545396 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.545941 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.558553 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdqbh\" (UniqueName: \"kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:33 crc kubenswrapper[4779]: I0929 19:30:33.682552 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" Sep 29 19:30:34 crc kubenswrapper[4779]: I0929 19:30:34.078448 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs"] Sep 29 19:30:34 crc kubenswrapper[4779]: I0929 19:30:34.278197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" event={"ID":"8beecba1-5edc-4f95-a9ad-49889c62c0ae","Type":"ContainerStarted","Data":"301466b0424c6f4b212697eeec7801d90b5b67639556b1dfd26ae4f6be558e9c"} Sep 29 19:30:35 crc kubenswrapper[4779]: I0929 19:30:35.292985 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" event={"ID":"8beecba1-5edc-4f95-a9ad-49889c62c0ae","Type":"ContainerStarted","Data":"145cf52862ff3e3260778eec9fc5c1b9dc0e4cf75f77d87d9d233b60974b1cea"} Sep 29 19:31:14 crc kubenswrapper[4779]: I0929 19:31:14.270793 4779 scope.go:117] "RemoveContainer" containerID="5c0a20d704274465016a7eaa27cafa17f905c0659e4eb33186c0c8abb2ec88df" Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.828227 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" podStartSLOduration=64.407469557 podStartE2EDuration="1m4.828209337s" podCreationTimestamp="2025-09-29 19:30:33 +0000 UTC" firstStartedPulling="2025-09-29 19:30:34.0840724 +0000 UTC m=+1344.968497540" lastFinishedPulling="2025-09-29 19:30:34.50481218 +0000 UTC m=+1345.389237320" observedRunningTime="2025-09-29 19:30:35.314875082 +0000 UTC m=+1346.199300192" watchObservedRunningTime="2025-09-29 19:31:37.828209337 +0000 UTC m=+1408.712634437" Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.833034 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.836428 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.854385 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.953408 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnnlt\" (UniqueName: \"kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.954683 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:37 crc kubenswrapper[4779]: I0929 19:31:37.954903 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.056575 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.056692 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.056795 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnnlt\" (UniqueName: \"kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.057141 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.057224 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.078538 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bnnlt\" (UniqueName: \"kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt\") pod \"redhat-marketplace-5g89f\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.169400 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:38 crc kubenswrapper[4779]: I0929 19:31:38.672538 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:38 crc kubenswrapper[4779]: W0929 19:31:38.688853 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab201d15_e626_4eaf_aee0_8760b043a466.slice/crio-209c388a86bd46a571fcc92dce52f98baa2103241a74497b478a2d69a7c952fb WatchSource:0}: Error finding container 209c388a86bd46a571fcc92dce52f98baa2103241a74497b478a2d69a7c952fb: Status 404 returned error can't find the container with id 209c388a86bd46a571fcc92dce52f98baa2103241a74497b478a2d69a7c952fb Sep 29 19:31:39 crc kubenswrapper[4779]: I0929 19:31:39.004088 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab201d15-e626-4eaf-aee0-8760b043a466" containerID="4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd" exitCode=0 Sep 29 19:31:39 crc kubenswrapper[4779]: I0929 19:31:39.004340 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerDied","Data":"4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd"} Sep 29 19:31:39 crc kubenswrapper[4779]: I0929 19:31:39.004462 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerStarted","Data":"209c388a86bd46a571fcc92dce52f98baa2103241a74497b478a2d69a7c952fb"} Sep 29 19:31:40 crc kubenswrapper[4779]: I0929 19:31:40.016402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerStarted","Data":"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f"} Sep 29 19:31:41 crc kubenswrapper[4779]: I0929 19:31:41.031430 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab201d15-e626-4eaf-aee0-8760b043a466" containerID="df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f" exitCode=0 Sep 29 19:31:41 crc kubenswrapper[4779]: I0929 19:31:41.031641 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerDied","Data":"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f"} Sep 29 19:31:42 crc kubenswrapper[4779]: I0929 19:31:42.048577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerStarted","Data":"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2"} Sep 29 19:31:42 crc kubenswrapper[4779]: I0929 19:31:42.081868 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5g89f" podStartSLOduration=2.603130042 
podStartE2EDuration="5.081841094s" podCreationTimestamp="2025-09-29 19:31:37 +0000 UTC" firstStartedPulling="2025-09-29 19:31:39.007698772 +0000 UTC m=+1409.892123872" lastFinishedPulling="2025-09-29 19:31:41.486409834 +0000 UTC m=+1412.370834924" observedRunningTime="2025-09-29 19:31:42.075910844 +0000 UTC m=+1412.960335984" watchObservedRunningTime="2025-09-29 19:31:42.081841094 +0000 UTC m=+1412.966266234" Sep 29 19:31:43 crc kubenswrapper[4779]: I0929 19:31:43.788157 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:31:43 crc kubenswrapper[4779]: I0929 19:31:43.788473 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:31:48 crc kubenswrapper[4779]: I0929 19:31:48.170551 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:48 crc kubenswrapper[4779]: I0929 19:31:48.171274 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:48 crc kubenswrapper[4779]: I0929 19:31:48.223670 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:49 crc kubenswrapper[4779]: I0929 19:31:49.191964 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:49 crc kubenswrapper[4779]: I0929 19:31:49.262292 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.152277 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5g89f" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="registry-server" containerID="cri-o://adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2" gracePeriod=2 Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.629770 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.815210 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities\") pod \"ab201d15-e626-4eaf-aee0-8760b043a466\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.815293 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content\") pod \"ab201d15-e626-4eaf-aee0-8760b043a466\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.815612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnnlt\" (UniqueName: \"kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt\") pod \"ab201d15-e626-4eaf-aee0-8760b043a466\" (UID: \"ab201d15-e626-4eaf-aee0-8760b043a466\") " Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.816949 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities" (OuterVolumeSpecName: "utilities") pod "ab201d15-e626-4eaf-aee0-8760b043a466" (UID: "ab201d15-e626-4eaf-aee0-8760b043a466"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.829201 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab201d15-e626-4eaf-aee0-8760b043a466" (UID: "ab201d15-e626-4eaf-aee0-8760b043a466"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.829685 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt" (OuterVolumeSpecName: "kube-api-access-bnnlt") pod "ab201d15-e626-4eaf-aee0-8760b043a466" (UID: "ab201d15-e626-4eaf-aee0-8760b043a466"). InnerVolumeSpecName "kube-api-access-bnnlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.918083 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.918119 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab201d15-e626-4eaf-aee0-8760b043a466-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:51 crc kubenswrapper[4779]: I0929 19:31:51.918170 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnnlt\" (UniqueName: \"kubernetes.io/projected/ab201d15-e626-4eaf-aee0-8760b043a466-kube-api-access-bnnlt\") on node \"crc\" DevicePath \"\"" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.166183 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab201d15-e626-4eaf-aee0-8760b043a466" containerID="adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2" exitCode=0 Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.166232 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerDied","Data":"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2"} Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.166260 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5g89f" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.166616 4779 scope.go:117] "RemoveContainer" containerID="adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.166598 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5g89f" event={"ID":"ab201d15-e626-4eaf-aee0-8760b043a466","Type":"ContainerDied","Data":"209c388a86bd46a571fcc92dce52f98baa2103241a74497b478a2d69a7c952fb"} Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.200372 4779 scope.go:117] "RemoveContainer" containerID="df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.213900 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.243554 4779 scope.go:117] "RemoveContainer" containerID="4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.243682 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5g89f"] Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.281569 4779 scope.go:117] "RemoveContainer" containerID="adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2" Sep 29 19:31:52 crc kubenswrapper[4779]: E0929 19:31:52.281890 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2\": container with ID starting with adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2 not found: ID does not exist" containerID="adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.281922 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2"} err="failed to get container status \"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2\": rpc error: code = NotFound desc = could not find container \"adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2\": container with ID starting with adede0c739d631be2186e587f2294a2e7271ff3b8edddedf87598a1cb00d78c2 not found: ID does not exist" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.281942 4779 scope.go:117] "RemoveContainer" containerID="df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f" Sep 29 19:31:52 crc kubenswrapper[4779]: E0929 19:31:52.282213 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f\": container with ID starting with df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f not found: ID does not exist" containerID="df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.282235 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f"} err="failed to get container status \"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f\": rpc error: code = NotFound desc = could not find container \"df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f\": container with ID starting with df5a6d8c27554aa0314043a17da848cc914da320af8eb5bb25362cd7f1c58c2f not found: ID does not exist" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.282247 4779 scope.go:117] "RemoveContainer" containerID="4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd" Sep 29 19:31:52 crc kubenswrapper[4779]: E0929 19:31:52.282509 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd\": container with ID starting with 4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd not found: ID does not exist" containerID="4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd" Sep 29 19:31:52 crc kubenswrapper[4779]: I0929 19:31:52.282528 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd"} err="failed to get container status \"4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd\": rpc error: code = NotFound desc = could not find container \"4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd\": container with ID starting with 4d643a5404028cbcbc9539a71cd63e0842f13227c564eb048f6caee1071b62bd not found: ID does not exist" Sep 29 19:31:53 crc kubenswrapper[4779]: I0929 19:31:53.783636 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" path="/var/lib/kubelet/pods/ab201d15-e626-4eaf-aee0-8760b043a466/volumes" Sep 29 19:32:13 crc kubenswrapper[4779]: I0929 19:32:13.784993 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:32:13 crc kubenswrapper[4779]: I0929 19:32:13.785477 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:32:14 crc kubenswrapper[4779]: I0929 19:32:14.359194 4779 scope.go:117] "RemoveContainer" containerID="9119ca905316b070b1d8ea07fa2990dbb483f21e8ff2be3166d2866e3ad7197b" Sep 29 19:32:14 crc kubenswrapper[4779]: I0929 19:32:14.441777 4779 scope.go:117] "RemoveContainer" containerID="2987f26cacd371de01cf94d756cd9e2992536d73bea4957c97162122b9b43b8a" Sep 29 19:32:14 crc kubenswrapper[4779]: I0929 19:32:14.475065 4779 scope.go:117] "RemoveContainer" containerID="ee63c651558ff4118a42199b7d391d96ea90fdca2e8705e528f0905396da6c3f" Sep 29 19:32:14 crc kubenswrapper[4779]: I0929 19:32:14.510399 4779 scope.go:117] "RemoveContainer" containerID="d415578bba65d5d24470e52ca7d0a3b4801b581d2265f0736392a3ba8a920675" Sep 29 19:32:14 crc kubenswrapper[4779]: I0929 19:32:14.568210 4779 scope.go:117] "RemoveContainer" containerID="d3217bb118bd7b17be40a662079994298427790a37cecb364347611119086955" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.203843 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:29 crc kubenswrapper[4779]: E0929 19:32:29.207238 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="extract-utilities" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.207584 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="extract-utilities" Sep 29 19:32:29 crc kubenswrapper[4779]: E0929 19:32:29.207602 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="extract-content" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.207611 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="extract-content" Sep 29 19:32:29 crc kubenswrapper[4779]: E0929 19:32:29.207624 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="registry-server" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.207633 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="registry-server" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.207907 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab201d15-e626-4eaf-aee0-8760b043a466" containerName="registry-server" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.209642 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.217872 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.266191 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.266347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqvrw\" (UniqueName: \"kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.266400 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.367763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqvrw\" (UniqueName: \"kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.367825 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.367925 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.368372 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.368576 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.388293 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cqvrw\" (UniqueName: \"kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw\") pod \"certified-operators-bcfgf\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:29 crc kubenswrapper[4779]: I0929 19:32:29.535190 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:30 crc kubenswrapper[4779]: I0929 19:32:30.037294 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:30 crc kubenswrapper[4779]: W0929 19:32:30.047458 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cc6ce3d_fc28_42bf_936f_32c862f14079.slice/crio-eb1f6bb60a92e134ea5da9df909bf87f4acc54e648ca62dd8d3b66b82dd67f0d WatchSource:0}: Error finding container eb1f6bb60a92e134ea5da9df909bf87f4acc54e648ca62dd8d3b66b82dd67f0d: Status 404 returned error can't find the container with id eb1f6bb60a92e134ea5da9df909bf87f4acc54e648ca62dd8d3b66b82dd67f0d Sep 29 19:32:30 crc kubenswrapper[4779]: I0929 19:32:30.615116 4779 generic.go:334] "Generic (PLEG): container finished" podID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerID="46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d" exitCode=0 Sep 29 19:32:30 crc kubenswrapper[4779]: I0929 19:32:30.615179 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerDied","Data":"46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d"} Sep 29 19:32:30 crc kubenswrapper[4779]: I0929 19:32:30.615521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerStarted","Data":"eb1f6bb60a92e134ea5da9df909bf87f4acc54e648ca62dd8d3b66b82dd67f0d"} Sep 29 19:32:32 crc kubenswrapper[4779]: I0929 19:32:32.640979 4779 generic.go:334] "Generic (PLEG): container finished" podID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerID="aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d" exitCode=0 Sep 29 19:32:32 crc kubenswrapper[4779]: I0929 19:32:32.641064 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerDied","Data":"aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d"} Sep 29 19:32:33 crc kubenswrapper[4779]: I0929 19:32:33.653382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerStarted","Data":"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381"} Sep 29 19:32:33 crc kubenswrapper[4779]: I0929 19:32:33.678968 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bcfgf" podStartSLOduration=2.231111702 podStartE2EDuration="4.678949272s" podCreationTimestamp="2025-09-29 19:32:29 +0000 UTC" firstStartedPulling="2025-09-29 19:32:30.618812847 +0000 UTC m=+1461.503237998" lastFinishedPulling="2025-09-29 19:32:33.066650468 +0000 UTC m=+1463.951075568" observedRunningTime="2025-09-29 19:32:33.673630397 +0000 UTC 
m=+1464.558055497" watchObservedRunningTime="2025-09-29 19:32:33.678949272 +0000 UTC m=+1464.563374372" Sep 29 19:32:39 crc kubenswrapper[4779]: I0929 19:32:39.536794 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:39 crc kubenswrapper[4779]: I0929 19:32:39.537766 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:39 crc kubenswrapper[4779]: I0929 19:32:39.615794 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:39 crc kubenswrapper[4779]: I0929 19:32:39.776064 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:39 crc kubenswrapper[4779]: I0929 19:32:39.862076 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:41 crc kubenswrapper[4779]: I0929 19:32:41.742662 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bcfgf" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="registry-server" containerID="cri-o://573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381" gracePeriod=2 Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.220032 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.349400 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities\") pod \"8cc6ce3d-fc28-42bf-936f-32c862f14079\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.349544 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content\") pod \"8cc6ce3d-fc28-42bf-936f-32c862f14079\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.349657 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqvrw\" (UniqueName: \"kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw\") pod \"8cc6ce3d-fc28-42bf-936f-32c862f14079\" (UID: \"8cc6ce3d-fc28-42bf-936f-32c862f14079\") " Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.350809 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities" (OuterVolumeSpecName: "utilities") pod "8cc6ce3d-fc28-42bf-936f-32c862f14079" (UID: "8cc6ce3d-fc28-42bf-936f-32c862f14079"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.358027 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw" (OuterVolumeSpecName: "kube-api-access-cqvrw") pod "8cc6ce3d-fc28-42bf-936f-32c862f14079" (UID: "8cc6ce3d-fc28-42bf-936f-32c862f14079"). InnerVolumeSpecName "kube-api-access-cqvrw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.451737 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.451799 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqvrw\" (UniqueName: \"kubernetes.io/projected/8cc6ce3d-fc28-42bf-936f-32c862f14079-kube-api-access-cqvrw\") on node \"crc\" DevicePath \"\"" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.758489 4779 generic.go:334] "Generic (PLEG): container finished" podID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerID="573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381" exitCode=0 Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.758541 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerDied","Data":"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381"} Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.758571 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bcfgf" event={"ID":"8cc6ce3d-fc28-42bf-936f-32c862f14079","Type":"ContainerDied","Data":"eb1f6bb60a92e134ea5da9df909bf87f4acc54e648ca62dd8d3b66b82dd67f0d"} Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.758603 4779 scope.go:117] "RemoveContainer" containerID="573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.758806 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bcfgf" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.793756 4779 scope.go:117] "RemoveContainer" containerID="aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.819789 4779 scope.go:117] "RemoveContainer" containerID="46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.894083 4779 scope.go:117] "RemoveContainer" containerID="573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381" Sep 29 19:32:42 crc kubenswrapper[4779]: E0929 19:32:42.895956 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381\": container with ID starting with 573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381 not found: ID does not exist" containerID="573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.896010 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381"} err="failed to get container status \"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381\": rpc error: code = NotFound desc = could not find container \"573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381\": container with ID starting with 573f17ab9d226985f02b4709e4f1522b29cc0d93fba35c0be7e1e571894fb381 not found: ID does not exist" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.896044 4779 scope.go:117] "RemoveContainer" containerID="aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d" Sep 29 19:32:42 crc kubenswrapper[4779]: E0929 19:32:42.896904 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d\": container with ID starting with aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d not found: ID does not exist" containerID="aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.896965 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d"} err="failed to get container status \"aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d\": rpc error: code = NotFound desc = could not find container \"aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d\": container with ID starting with aa4516903cecae292be4dbf64ba5fac179624651699b2ac71b0957361509918d not found: ID does not exist" Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.897001 4779 scope.go:117] "RemoveContainer" containerID="46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d" Sep 29 19:32:42 crc kubenswrapper[4779]: E0929 19:32:42.897442 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d\": container with ID starting with 46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d not found: ID does not exist" containerID="46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d" 
Sep 29 19:32:42 crc kubenswrapper[4779]: I0929 19:32:42.897479 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d"} err="failed to get container status \"46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d\": rpc error: code = NotFound desc = could not find container \"46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d\": container with ID starting with 46452b0af1d11377e4fe29a03d34d172a8c8e77aab0c22dc19fe1b2a2c075b2d not found: ID does not exist" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.169913 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cc6ce3d-fc28-42bf-936f-32c862f14079" (UID: "8cc6ce3d-fc28-42bf-936f-32c862f14079"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.171002 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc6ce3d-fc28-42bf-936f-32c862f14079-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.418502 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.433132 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bcfgf"] Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.785297 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.785423 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.786997 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" path="/var/lib/kubelet/pods/8cc6ce3d-fc28-42bf-936f-32c862f14079/volumes" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.788028 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.788892 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:32:43 crc kubenswrapper[4779]: I0929 19:32:43.789010 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" 
containerName="machine-config-daemon" containerID="cri-o://d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6" gracePeriod=600 Sep 29 19:32:44 crc kubenswrapper[4779]: I0929 19:32:44.791576 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6" exitCode=0 Sep 29 19:32:44 crc kubenswrapper[4779]: I0929 19:32:44.791652 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6"} Sep 29 19:32:44 crc kubenswrapper[4779]: I0929 19:32:44.792429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"} Sep 29 19:32:44 crc kubenswrapper[4779]: I0929 19:32:44.792473 4779 scope.go:117] "RemoveContainer" containerID="d6651225f345c1ab3f5b037a81a9d8ef2b74dcbcb999a569db1d5ddc8894af03" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.257258 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m885h"] Sep 29 19:32:54 crc kubenswrapper[4779]: E0929 19:32:54.258430 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="extract-content" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.258452 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="extract-content" Sep 29 19:32:54 crc kubenswrapper[4779]: E0929 19:32:54.258482 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="extract-utilities" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.258495 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="extract-utilities" Sep 29 19:32:54 crc kubenswrapper[4779]: E0929 19:32:54.258521 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="registry-server" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.258533 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="registry-server" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.258910 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc6ce3d-fc28-42bf-936f-32c862f14079" containerName="registry-server" Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.266888 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.268848 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m885h"]
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.433760 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.433920 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhk49\" (UniqueName: \"kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.434265 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.536535 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhk49\" (UniqueName: \"kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.536749 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.536794 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.537382 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.537536 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.561856 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhk49\" (UniqueName: \"kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49\") pod \"redhat-operators-m885h\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") " pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:54 crc kubenswrapper[4779]: I0929 19:32:54.606488 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:32:55 crc kubenswrapper[4779]: I0929 19:32:55.077162 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m885h"]
Sep 29 19:32:55 crc kubenswrapper[4779]: I0929 19:32:55.948299 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerID="47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885" exitCode=0
Sep 29 19:32:55 crc kubenswrapper[4779]: I0929 19:32:55.948531 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerDied","Data":"47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885"}
Sep 29 19:32:55 crc kubenswrapper[4779]: I0929 19:32:55.948692 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerStarted","Data":"4492f09b2133bf7f564ec056802f0df7303ef74bc991b33ff72dc633fd905e6b"}
Sep 29 19:32:56 crc kubenswrapper[4779]: I0929 19:32:56.970799 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerStarted","Data":"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"}
Sep 29 19:32:59 crc kubenswrapper[4779]: I0929 19:32:59.002409 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerID="ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9" exitCode=0
Sep 29 19:32:59 crc kubenswrapper[4779]: I0929 19:32:59.002639 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerDied","Data":"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"}
Sep 29 19:33:00 crc kubenswrapper[4779]: I0929 19:33:00.013498 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerStarted","Data":"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"}
Sep 29 19:33:00 crc kubenswrapper[4779]: I0929 19:33:00.036242 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m885h" podStartSLOduration=2.552681862 podStartE2EDuration="6.03621376s" podCreationTimestamp="2025-09-29 19:32:54 +0000 UTC" firstStartedPulling="2025-09-29 19:32:55.951375237 +0000 UTC m=+1486.835800337" lastFinishedPulling="2025-09-29 19:32:59.434907105 +0000 UTC m=+1490.319332235" observedRunningTime="2025-09-29 19:33:00.03510319 +0000 UTC m=+1490.919528290" watchObservedRunningTime="2025-09-29 19:33:00.03621376 +0000 UTC m=+1490.920638920"
Sep 29 19:33:04 crc kubenswrapper[4779]: I0929 19:33:04.607240 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:04 crc kubenswrapper[4779]: I0929 19:33:04.607734 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:04 crc kubenswrapper[4779]: I0929 19:33:04.701505 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:05 crc kubenswrapper[4779]: I0929 19:33:05.126269 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:05 crc kubenswrapper[4779]: I0929 19:33:05.185751 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m885h"]
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.080453 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m885h" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="registry-server" containerID="cri-o://f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3" gracePeriod=2
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.565442 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.715181 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhk49\" (UniqueName: \"kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49\") pod \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") "
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.715305 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content\") pod \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") "
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.715388 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities\") pod \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\" (UID: \"e1dd63d1-d934-467b-bd2a-65b7869ec1a0\") "
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.716247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities" (OuterVolumeSpecName: "utilities") pod "e1dd63d1-d934-467b-bd2a-65b7869ec1a0" (UID: "e1dd63d1-d934-467b-bd2a-65b7869ec1a0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.724195 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49" (OuterVolumeSpecName: "kube-api-access-lhk49") pod "e1dd63d1-d934-467b-bd2a-65b7869ec1a0" (UID: "e1dd63d1-d934-467b-bd2a-65b7869ec1a0"). InnerVolumeSpecName "kube-api-access-lhk49". PluginName "kubernetes.io/projected", VolumeGidValue ""
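The pod_startup_latency_tracker entry a few lines above prints both podStartSLOduration and podStartE2EDuration for redhat-operators-m885h. The numbers are consistent with the SLO figure being the end-to-end startup time minus the image-pull window measured on the monotonic clock (the m=+ offsets). A quick check of that relationship, using only values copied from the log entry; the formula itself is inferred from the numbers, not taken from kubelet source:

```go
package main

import "fmt"

func main() {
	// podStartE2EDuration: watchObservedRunningTime - podCreationTimestamp
	// (19:33:00.03621376 - 19:32:54 = 6.03621376s), as printed in the log.
	e2e := 6.03621376
	// Monotonic m=+ offsets from the same entry.
	firstStartedPulling := 1486.835800337
	lastFinishedPulling := 1490.319332235
	pulling := lastFinishedPulling - firstStartedPulling // 3.483531898s spent pulling images
	fmt.Printf("podStartSLOduration = %.9f\n", e2e-pulling) // 2.552681862, matching the log
}
```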
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.804918 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1dd63d1-d934-467b-bd2a-65b7869ec1a0" (UID: "e1dd63d1-d934-467b-bd2a-65b7869ec1a0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.817787 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhk49\" (UniqueName: \"kubernetes.io/projected/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-kube-api-access-lhk49\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.817829 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:07 crc kubenswrapper[4779]: I0929 19:33:07.817872 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd63d1-d934-467b-bd2a-65b7869ec1a0-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.089980 4779 generic.go:334] "Generic (PLEG): container finished" podID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerID="f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3" exitCode=0
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.090086 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m885h"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.090078 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerDied","Data":"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"}
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.091148 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m885h" event={"ID":"e1dd63d1-d934-467b-bd2a-65b7869ec1a0","Type":"ContainerDied","Data":"4492f09b2133bf7f564ec056802f0df7303ef74bc991b33ff72dc633fd905e6b"}
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.091175 4779 scope.go:117] "RemoveContainer" containerID="f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.131968 4779 scope.go:117] "RemoveContainer" containerID="ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.134231 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m885h"]
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.144028 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m885h"]
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.162893 4779 scope.go:117] "RemoveContainer" containerID="47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.209599 4779 scope.go:117] "RemoveContainer" containerID="f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"
Sep 29 19:33:08 crc kubenswrapper[4779]: E0929 19:33:08.210022 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3\": container with ID starting with f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3 not found: ID does not exist" containerID="f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.210086 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3"} err="failed to get container status \"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3\": rpc error: code = NotFound desc = could not find container \"f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3\": container with ID starting with f65d3f0947828f8bdbaafa7cc025ab44c659f18dabbb9fa6b2b3f1162f65a7d3 not found: ID does not exist"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.210118 4779 scope.go:117] "RemoveContainer" containerID="ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"
Sep 29 19:33:08 crc kubenswrapper[4779]: E0929 19:33:08.210490 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9\": container with ID starting with ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9 not found: ID does not exist" containerID="ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.210540 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9"} err="failed to get container status \"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9\": rpc error: code = NotFound desc = could not find container \"ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9\": container with ID starting with ceaac07c4887c326146aada34b16efcf1c38237a744b8346b96bad8ab962b9e9 not found: ID does not exist"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.210576 4779 scope.go:117] "RemoveContainer" containerID="47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885"
Sep 29 19:33:08 crc kubenswrapper[4779]: E0929 19:33:08.210847 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885\": container with ID starting with 47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885 not found: ID does not exist" containerID="47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885"
Sep 29 19:33:08 crc kubenswrapper[4779]: I0929 19:33:08.210875 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885"} err="failed to get container status \"47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885\": rpc error: code = NotFound desc = could not find container \"47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885\": container with ID starting with 47ba77de219ecfee67020b4fe981085d3f9deab783f703ff230f0958260de885 not found: ID does not exist"
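The RemoveContainer / NotFound exchanges above are benign: by the time the kubelet asks the runtime for the container's status, CRI-O has already removed it. A hedged sketch of the usual idempotent-delete pattern around a gRPC NotFound status; deleteContainer here is a hypothetical stand-in for the CRI client call, not the real kubelet code path:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// deleteContainer simulates a runtime that has already removed the container.
func deleteContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

// removeContainer treats NotFound as success: absence is the desired end state,
// so the error is logged and swallowed rather than retried.
func removeContainer(id string) error {
	if err := deleteContainer(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone; treating delete as success\n", id)
			return nil
		}
		return err
	}
	return nil
}

func main() {
	if err := removeContainer("f65d3f09"); err != nil {
		fmt.Println(err)
	}
}
```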
Sep 29 19:33:09 crc kubenswrapper[4779]: I0929 19:33:09.783366 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" path="/var/lib/kubelet/pods/e1dd63d1-d934-467b-bd2a-65b7869ec1a0/volumes"
Sep 29 19:33:37 crc kubenswrapper[4779]: I0929 19:33:37.439612 4779 generic.go:334] "Generic (PLEG): container finished" podID="8beecba1-5edc-4f95-a9ad-49889c62c0ae" containerID="145cf52862ff3e3260778eec9fc5c1b9dc0e4cf75f77d87d9d233b60974b1cea" exitCode=0
Sep 29 19:33:37 crc kubenswrapper[4779]: I0929 19:33:37.439718 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" event={"ID":"8beecba1-5edc-4f95-a9ad-49889c62c0ae","Type":"ContainerDied","Data":"145cf52862ff3e3260778eec9fc5c1b9dc0e4cf75f77d87d9d233b60974b1cea"}
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.885273 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs"
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.938855 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory\") pod \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") "
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.938965 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdqbh\" (UniqueName: \"kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh\") pod \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") "
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.939015 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key\") pod \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") "
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.939050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle\") pod \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\" (UID: \"8beecba1-5edc-4f95-a9ad-49889c62c0ae\") "
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.944929 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh" (OuterVolumeSpecName: "kube-api-access-vdqbh") pod "8beecba1-5edc-4f95-a9ad-49889c62c0ae" (UID: "8beecba1-5edc-4f95-a9ad-49889c62c0ae"). InnerVolumeSpecName "kube-api-access-vdqbh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.945397 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8beecba1-5edc-4f95-a9ad-49889c62c0ae" (UID: "8beecba1-5edc-4f95-a9ad-49889c62c0ae"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.968796 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8beecba1-5edc-4f95-a9ad-49889c62c0ae" (UID: "8beecba1-5edc-4f95-a9ad-49889c62c0ae"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:38 crc kubenswrapper[4779]: I0929 19:33:38.971301 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory" (OuterVolumeSpecName: "inventory") pod "8beecba1-5edc-4f95-a9ad-49889c62c0ae" (UID: "8beecba1-5edc-4f95-a9ad-49889c62c0ae"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.041714 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.041752 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdqbh\" (UniqueName: \"kubernetes.io/projected/8beecba1-5edc-4f95-a9ad-49889c62c0ae-kube-api-access-vdqbh\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.041765 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.041774 4779 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beecba1-5edc-4f95-a9ad-49889c62c0ae-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.461597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs" event={"ID":"8beecba1-5edc-4f95-a9ad-49889c62c0ae","Type":"ContainerDied","Data":"301466b0424c6f4b212697eeec7801d90b5b67639556b1dfd26ae4f6be558e9c"}
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.461635 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="301466b0424c6f4b212697eeec7801d90b5b67639556b1dfd26ae4f6be558e9c"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.461686 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs"
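The unmount entries above name two kubernetes.io/secret volumes, "inventory" and "ssh-key", alongside the projected service-account token volume. A sketch of what the corresponding volume stanza looks like in k8s.io/api/core/v1 types; the container image, mount paths, and the inventory secret name are assumptions for illustration, while the ssh-key secret name is taken from the reflector cache entries just below:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	pod := corev1.Pod{
		Spec: corev1.PodSpec{
			Containers: []corev1.Container{{
				Name:  "bootstrap-edpm-deployment-openstack-edpm-ipam",
				Image: "example.invalid/openstack-ansible-ee:latest", // hypothetical image
				VolumeMounts: []corev1.VolumeMount{
					{Name: "inventory", MountPath: "/runner/inventory", ReadOnly: true}, // path assumed
					{Name: "ssh-key", MountPath: "/runner/ssh-key", ReadOnly: true},     // path assumed
				},
			}},
			Volumes: []corev1.Volume{
				// Secret volumes surface in the kubelet log as kubernetes.io/secret plugin volumes.
				{Name: "inventory", VolumeSource: corev1.VolumeSource{
					Secret: &corev1.SecretVolumeSource{SecretName: "inventory-secret"}, // name assumed
				}},
				{Name: "ssh-key", VolumeSource: corev1.VolumeSource{
					Secret: &corev1.SecretVolumeSource{SecretName: "dataplane-ansible-ssh-private-key-secret"},
				}},
			},
		},
	}
	fmt.Println(len(pod.Spec.Volumes), "volumes defined")
}
```

The kube-api-access-* projected token volume is injected by the control plane, which is why it appears in the log without being declared in the pod spec.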
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.576980 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"]
Sep 29 19:33:39 crc kubenswrapper[4779]: E0929 19:33:39.581412 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="extract-utilities"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.581567 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="extract-utilities"
Sep 29 19:33:39 crc kubenswrapper[4779]: E0929 19:33:39.581614 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8beecba1-5edc-4f95-a9ad-49889c62c0ae" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.581630 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8beecba1-5edc-4f95-a9ad-49889c62c0ae" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:39 crc kubenswrapper[4779]: E0929 19:33:39.581741 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="registry-server"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.581759 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="registry-server"
Sep 29 19:33:39 crc kubenswrapper[4779]: E0929 19:33:39.581784 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="extract-content"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.581796 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="extract-content"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.584301 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1dd63d1-d934-467b-bd2a-65b7869ec1a0" containerName="registry-server"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.584390 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8beecba1-5edc-4f95-a9ad-49889c62c0ae" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.586489 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.596793 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.597463 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.597985 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.599983 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.624886 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"]
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.653014 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.653072 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.653105 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swszk\" (UniqueName: \"kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.754840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.754901 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.754938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swszk\" (UniqueName: \"kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.761263 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.764976 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.776197 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swszk\" (UniqueName: \"kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:39 crc kubenswrapper[4779]: I0929 19:33:39.913750 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:33:40 crc kubenswrapper[4779]: I0929 19:33:40.526090 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"]
Sep 29 19:33:40 crc kubenswrapper[4779]: W0929 19:33:40.527461 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2f2a76e_5c5c_4708_bc75_12909e8859fc.slice/crio-0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981 WatchSource:0}: Error finding container 0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981: Status 404 returned error can't find the container with id 0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981
Sep 29 19:33:40 crc kubenswrapper[4779]: I0929 19:33:40.530736 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 19:33:41 crc kubenswrapper[4779]: I0929 19:33:41.483223 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw" event={"ID":"b2f2a76e-5c5c-4708-bc75-12909e8859fc","Type":"ContainerStarted","Data":"42d88df45caec0492f64c4d69ddb4b65398822942673d5c39c481ea3a25d0bbb"}
Sep 29 19:33:41 crc kubenswrapper[4779]: I0929 19:33:41.483794 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw" event={"ID":"b2f2a76e-5c5c-4708-bc75-12909e8859fc","Type":"ContainerStarted","Data":"0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981"}
Sep 29 19:33:41 crc kubenswrapper[4779]: I0929 19:33:41.510298 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw" podStartSLOduration=1.8633387300000002 podStartE2EDuration="2.510276127s" podCreationTimestamp="2025-09-29 19:33:39 +0000 UTC" firstStartedPulling="2025-09-29 19:33:40.530475342 +0000 UTC m=+1531.414900452" lastFinishedPulling="2025-09-29 19:33:41.177412739 +0000 UTC m=+1532.061837849" observedRunningTime="2025-09-29 19:33:41.503455981 +0000 UTC m=+1532.387881091" watchObservedRunningTime="2025-09-29 19:33:41.510276127 +0000 UTC m=+1532.394701237"
Sep 29 19:34:14 crc kubenswrapper[4779]: I0929 19:34:14.733022 4779 scope.go:117] "RemoveContainer" containerID="a2e947d718363fd1a40e9c4d85c1c795e21e7a1a20250e8407ef63be7cb690b3"
Sep 29 19:34:14 crc kubenswrapper[4779]: I0929 19:34:14.758973 4779 scope.go:117] "RemoveContainer" containerID="1c9778ec4d63551117bdeb932f250c4e87a793a27d8a5d8f32d8369c1c3c517f"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.439379 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.441400 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.455458 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.616598 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x97xx\" (UniqueName: \"kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.616746 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.616798 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.719171 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.719257 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.719433 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x97xx\" (UniqueName: \"kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.719846 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.719922 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.742120 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x97xx\" (UniqueName: \"kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx\") pod \"community-operators-lcf5x\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") " pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:17 crc kubenswrapper[4779]: I0929 19:34:17.794894 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:18 crc kubenswrapper[4779]: I0929 19:34:18.332797 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:18 crc kubenswrapper[4779]: I0929 19:34:18.875764 4779 generic.go:334] "Generic (PLEG): container finished" podID="86990781-e428-4390-8356-8d5b23f81666" containerID="ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77" exitCode=0
Sep 29 19:34:18 crc kubenswrapper[4779]: I0929 19:34:18.875843 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerDied","Data":"ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77"}
Sep 29 19:34:18 crc kubenswrapper[4779]: I0929 19:34:18.875898 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerStarted","Data":"5d10a726d99450521ad138c2fd89315c704e1577f52b5a1f0d1a03d1787cc7ec"}
Sep 29 19:34:19 crc kubenswrapper[4779]: I0929 19:34:19.887879 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerStarted","Data":"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"}
Sep 29 19:34:20 crc kubenswrapper[4779]: I0929 19:34:20.902077 4779 generic.go:334] "Generic (PLEG): container finished" podID="86990781-e428-4390-8356-8d5b23f81666" containerID="d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d" exitCode=0
Sep 29 19:34:20 crc kubenswrapper[4779]: I0929 19:34:20.902114 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerDied","Data":"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"}
Sep 29 19:34:21 crc kubenswrapper[4779]: I0929 19:34:21.913420 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerStarted","Data":"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"}
Sep 29 19:34:21 crc kubenswrapper[4779]: I0929 19:34:21.946304 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lcf5x" podStartSLOduration=2.424019859 podStartE2EDuration="4.946279255s" podCreationTimestamp="2025-09-29 19:34:17 +0000 UTC" firstStartedPulling="2025-09-29 19:34:18.878160463 +0000 UTC m=+1569.762585563" lastFinishedPulling="2025-09-29 19:34:21.400419859 +0000 UTC m=+1572.284844959" observedRunningTime="2025-09-29 19:34:21.933429825 +0000 UTC m=+1572.817854965" watchObservedRunningTime="2025-09-29 19:34:21.946279255 +0000 UTC m=+1572.830704385"
Sep 29 19:34:27 crc kubenswrapper[4779]: I0929 19:34:27.795937 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:27 crc kubenswrapper[4779]: I0929 19:34:27.796549 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:27 crc kubenswrapper[4779]: I0929 19:34:27.875474 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:28 crc kubenswrapper[4779]: I0929 19:34:28.048607 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:28 crc kubenswrapper[4779]: I0929 19:34:28.116400 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:29 crc kubenswrapper[4779]: I0929 19:34:29.991921 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lcf5x" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="registry-server" containerID="cri-o://1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407" gracePeriod=2
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.448537 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lcf5x"
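Both gracePeriod=600 (machine-config-daemon, earlier) and gracePeriod=2 (the registry-server kill just above) follow the same shutdown contract: SIGTERM first, SIGKILL when the grace period expires. A generic process-level sketch of that pattern, not the kubelet/CRI code path:

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace asks the process to stop, then force-kills it if it has not
// exited within the grace period.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request to stop
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period runs out
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log entry above
}
```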
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.576976 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x97xx\" (UniqueName: \"kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx\") pod \"86990781-e428-4390-8356-8d5b23f81666\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") "
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.577076 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content\") pod \"86990781-e428-4390-8356-8d5b23f81666\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") "
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.577164 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities\") pod \"86990781-e428-4390-8356-8d5b23f81666\" (UID: \"86990781-e428-4390-8356-8d5b23f81666\") "
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.578740 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities" (OuterVolumeSpecName: "utilities") pod "86990781-e428-4390-8356-8d5b23f81666" (UID: "86990781-e428-4390-8356-8d5b23f81666"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.586248 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx" (OuterVolumeSpecName: "kube-api-access-x97xx") pod "86990781-e428-4390-8356-8d5b23f81666" (UID: "86990781-e428-4390-8356-8d5b23f81666"). InnerVolumeSpecName "kube-api-access-x97xx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.679156 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.679187 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x97xx\" (UniqueName: \"kubernetes.io/projected/86990781-e428-4390-8356-8d5b23f81666-kube-api-access-x97xx\") on node \"crc\" DevicePath \"\""
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.684812 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86990781-e428-4390-8356-8d5b23f81666" (UID: "86990781-e428-4390-8356-8d5b23f81666"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:34:30 crc kubenswrapper[4779]: I0929 19:34:30.780846 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86990781-e428-4390-8356-8d5b23f81666-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.015622 4779 generic.go:334] "Generic (PLEG): container finished" podID="86990781-e428-4390-8356-8d5b23f81666" containerID="1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407" exitCode=0
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.015675 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerDied","Data":"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"}
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.015703 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lcf5x" event={"ID":"86990781-e428-4390-8356-8d5b23f81666","Type":"ContainerDied","Data":"5d10a726d99450521ad138c2fd89315c704e1577f52b5a1f0d1a03d1787cc7ec"}
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.015721 4779 scope.go:117] "RemoveContainer" containerID="1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.015909 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lcf5x"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.049427 4779 scope.go:117] "RemoveContainer" containerID="d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.061655 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.074037 4779 scope.go:117] "RemoveContainer" containerID="ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.080124 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lcf5x"]
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.146954 4779 scope.go:117] "RemoveContainer" containerID="1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"
Sep 29 19:34:31 crc kubenswrapper[4779]: E0929 19:34:31.147514 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407\": container with ID starting with 1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407 not found: ID does not exist" containerID="1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.147566 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407"} err="failed to get container status \"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407\": rpc error: code = NotFound desc = could not find container \"1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407\": container with ID starting with 1282cf59d381a7091e10ed5507123eadeaf00005be25f08764ae1d2a88c49407 not found: ID does not exist"
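The manager.go:1169 watch-event warning a little earlier carried a systemd cgroup path of the form kubepods-besteffort-pod<uid>.slice/crio-<container-id>, with the pod UID's dashes written as underscores. A small stdlib-only parser that recovers both identifiers; this is my own sketch, not cadvisor or kubelet code:

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// parseCrioCgroup extracts the pod UID and container ID from a cgroup path
// like the one in the watch-event warning above.
func parseCrioCgroup(p string) (podUID, containerID string, ok bool) {
	dir, base := path.Split(path.Clean(p))
	if !strings.HasPrefix(base, "crio-") {
		return "", "", false
	}
	containerID = strings.TrimSuffix(strings.TrimPrefix(base, "crio-"), ".scope")
	slice := path.Base(path.Clean(dir)) // e.g. kubepods-besteffort-podb2f2a76e_..._12909e8859fc.slice
	i := strings.Index(slice, "-pod")
	if i < 0 || !strings.HasSuffix(slice, ".slice") {
		return "", "", false
	}
	podUID = strings.TrimSuffix(slice[i+len("-pod"):], ".slice")
	podUID = strings.ReplaceAll(podUID, "_", "-") // the UID's dashes appear as underscores in the slice name
	return podUID, containerID, true
}

func main() {
	p := "/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2f2a76e_5c5c_4708_bc75_12909e8859fc.slice/crio-0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981"
	uid, cid, _ := parseCrioCgroup(p)
	fmt.Println(uid) // b2f2a76e-5c5c-4708-bc75-12909e8859fc
	fmt.Println(cid) // 0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981
}
```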
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.147607 4779 scope.go:117] "RemoveContainer" containerID="d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"
Sep 29 19:34:31 crc kubenswrapper[4779]: E0929 19:34:31.147853 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d\": container with ID starting with d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d not found: ID does not exist" containerID="d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.147895 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d"} err="failed to get container status \"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d\": rpc error: code = NotFound desc = could not find container \"d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d\": container with ID starting with d1294962e4efe2442126d93c18e24be01b6461da6b26786d3bac16f44e56d19d not found: ID does not exist"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.147920 4779 scope.go:117] "RemoveContainer" containerID="ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77"
Sep 29 19:34:31 crc kubenswrapper[4779]: E0929 19:34:31.148260 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77\": container with ID starting with ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77 not found: ID does not exist" containerID="ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.148303 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77"} err="failed to get container status \"ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77\": rpc error: code = NotFound desc = could not find container \"ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77\": container with ID starting with ecfe6fcb6d99bfbb2cd6f8462dbc930253db0759917e170c857ec843795b8c77 not found: ID does not exist"
Sep 29 19:34:31 crc kubenswrapper[4779]: I0929 19:34:31.780126 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86990781-e428-4390-8356-8d5b23f81666" path="/var/lib/kubelet/pods/86990781-e428-4390-8356-8d5b23f81666/volumes"
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.047779 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-4q8h8"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.058032 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-kzm6s"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.068527 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-df4sn"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.074848 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-4q8h8"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.082076 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-df4sn"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.089249 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-kzm6s"]
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.784043 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a729519-3dac-4480-a83a-99f5fb79a284" path="/var/lib/kubelet/pods/9a729519-3dac-4480-a83a-99f5fb79a284/volumes"
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.785783 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d04bbeb8-1f3e-47db-9c2e-a33c98f518b7" path="/var/lib/kubelet/pods/d04bbeb8-1f3e-47db-9c2e-a33c98f518b7/volumes"
Sep 29 19:34:35 crc kubenswrapper[4779]: I0929 19:34:35.786958 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8365cf2-c10d-40a2-9bfb-1a386175d137" path="/var/lib/kubelet/pods/d8365cf2-c10d-40a2-9bfb-1a386175d137/volumes"
Sep 29 19:34:45 crc kubenswrapper[4779]: I0929 19:34:45.036137 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4816-account-create-td7m9"]
Sep 29 19:34:45 crc kubenswrapper[4779]: I0929 19:34:45.048014 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4816-account-create-td7m9"]
Sep 29 19:34:45 crc kubenswrapper[4779]: I0929 19:34:45.784548 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac185b21-9179-49ca-9034-24d9e28f3dd2" path="/var/lib/kubelet/pods/ac185b21-9179-49ca-9034-24d9e28f3dd2/volumes"
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.043275 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8b67-account-create-lzcfr"]
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.052183 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5bc9-account-create-dbrvg"]
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.060382 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8b67-account-create-lzcfr"]
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.067947 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5bc9-account-create-dbrvg"]
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.777074 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba0df867-9aba-41a8-8359-4c93514a9115" path="/var/lib/kubelet/pods/ba0df867-9aba-41a8-8359-4c93514a9115/volumes"
Sep 29 19:34:49 crc kubenswrapper[4779]: I0929 19:34:49.777898 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1b73437-bcee-4bd0-82ff-3a88a0eebb4c" path="/var/lib/kubelet/pods/e1b73437-bcee-4bd0-82ff-3a88a0eebb4c/volumes"
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.058037 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7ttv2"]
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.067000 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-zrrxv"]
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.076027 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-k7hwz"]
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.086249 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7ttv2"]
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.095354 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-k7hwz"]
Sep 29 19:35:08 crc kubenswrapper[4779]: I0929 19:35:08.105681 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-zrrxv"]
Sep 29 19:35:09 crc kubenswrapper[4779]: I0929 19:35:09.434082 4779 generic.go:334] "Generic (PLEG): container finished" podID="b2f2a76e-5c5c-4708-bc75-12909e8859fc" containerID="42d88df45caec0492f64c4d69ddb4b65398822942673d5c39c481ea3a25d0bbb" exitCode=0
Sep 29 19:35:09 crc kubenswrapper[4779]: I0929 19:35:09.434181 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw" event={"ID":"b2f2a76e-5c5c-4708-bc75-12909e8859fc","Type":"ContainerDied","Data":"42d88df45caec0492f64c4d69ddb4b65398822942673d5c39c481ea3a25d0bbb"}
Sep 29 19:35:09 crc kubenswrapper[4779]: I0929 19:35:09.807042 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb16ed1-d3da-47d3-bb78-26e3b67fbc90" path="/var/lib/kubelet/pods/1eb16ed1-d3da-47d3-bb78-26e3b67fbc90/volumes"
Sep 29 19:35:09 crc kubenswrapper[4779]: I0929 19:35:09.808093 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a66bfb-fbcf-4e22-93aa-e8d91aa2892f" path="/var/lib/kubelet/pods/73a66bfb-fbcf-4e22-93aa-e8d91aa2892f/volumes"
Sep 29 19:35:09 crc kubenswrapper[4779]: I0929 19:35:09.833259 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8267d69e-ea3c-4782-93d0-0bcf9a95bdf1" path="/var/lib/kubelet/pods/8267d69e-ea3c-4782-93d0-0bcf9a95bdf1/volumes"
Sep 29 19:35:10 crc kubenswrapper[4779]: I0929 19:35:10.032544 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-kqr8g"]
Sep 29 19:35:10 crc kubenswrapper[4779]: I0929 19:35:10.042379 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-kqr8g"]
Sep 29 19:35:10 crc kubenswrapper[4779]: I0929 19:35:10.910837 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.066176 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swszk\" (UniqueName: \"kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk\") pod \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") "
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.066385 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key\") pod \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") "
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.066520 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory\") pod \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\" (UID: \"b2f2a76e-5c5c-4708-bc75-12909e8859fc\") "
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.071709 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk" (OuterVolumeSpecName: "kube-api-access-swszk") pod "b2f2a76e-5c5c-4708-bc75-12909e8859fc" (UID: "b2f2a76e-5c5c-4708-bc75-12909e8859fc"). InnerVolumeSpecName "kube-api-access-swszk". PluginName "kubernetes.io/projected", VolumeGidValue ""
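The kubelet_volumes.go:163 entries above delete /var/lib/kubelet/pods/<uid>/volumes once a removed pod's volumes are fully torn down. A read-only sketch (my own, not kubelet code) that reports which pod directories under that path still hold a volumes subdirectory, i.e. candidates the kubelet would still be tracking:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	root := "/var/lib/kubelet/pods" // directory layout as printed in the log
	entries, err := os.ReadDir(root)
	if err != nil {
		fmt.Println("cannot read", root, "-", err)
		return
	}
	for _, e := range entries {
		if !e.IsDir() {
			continue
		}
		volumes := filepath.Join(root, e.Name(), "volumes")
		if _, err := os.Stat(volumes); err == nil {
			fmt.Println("volumes dir still present for pod UID", e.Name())
		}
	}
}
```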
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.098811 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b2f2a76e-5c5c-4708-bc75-12909e8859fc" (UID: "b2f2a76e-5c5c-4708-bc75-12909e8859fc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.116571 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory" (OuterVolumeSpecName: "inventory") pod "b2f2a76e-5c5c-4708-bc75-12909e8859fc" (UID: "b2f2a76e-5c5c-4708-bc75-12909e8859fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.168467 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swszk\" (UniqueName: \"kubernetes.io/projected/b2f2a76e-5c5c-4708-bc75-12909e8859fc-kube-api-access-swszk\") on node \"crc\" DevicePath \"\"" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.168507 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.168521 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2f2a76e-5c5c-4708-bc75-12909e8859fc-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.454980 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw" event={"ID":"b2f2a76e-5c5c-4708-bc75-12909e8859fc","Type":"ContainerDied","Data":"0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981"} Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.455020 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e3bc15a7068b7a81f84507622ed877f47713d376208135966393198e1e27981" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.455407 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.455407 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.549496 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"]
Sep 29 19:35:11 crc kubenswrapper[4779]: E0929 19:35:11.549965 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="registry-server"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.549985 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="registry-server"
Sep 29 19:35:11 crc kubenswrapper[4779]: E0929 19:35:11.550009 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="extract-content"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.550017 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="extract-content"
Sep 29 19:35:11 crc kubenswrapper[4779]: E0929 19:35:11.550033 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="extract-utilities"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.550040 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="extract-utilities"
Sep 29 19:35:11 crc kubenswrapper[4779]: E0929 19:35:11.550067 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f2a76e-5c5c-4708-bc75-12909e8859fc" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.550075 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f2a76e-5c5c-4708-bc75-12909e8859fc" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.550338 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2f2a76e-5c5c-4708-bc75-12909e8859fc" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.550360 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="86990781-e428-4390-8356-8d5b23f81666" containerName="registry-server"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.551170 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.554459 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.554549 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.554881 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.554921 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.559575 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"]
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.680171 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqhn5\" (UniqueName: \"kubernetes.io/projected/02971c6b-be51-4634-b3a0-661125814bea-kube-api-access-vqhn5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.680398 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.680456 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.781842 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.781888 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.784053 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eea816e-5b9e-4646-bd6a-2421436d9c90" path="/var/lib/kubelet/pods/2eea816e-5b9e-4646-bd6a-2421436d9c90/volumes" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.786105 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.788456 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.801700 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqhn5\" (UniqueName: \"kubernetes.io/projected/02971c6b-be51-4634-b3a0-661125814bea-kube-api-access-vqhn5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c66cd\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.880295 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:35:11 crc kubenswrapper[4779]: I0929 19:35:11.880295 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:35:12 crc kubenswrapper[4779]: I0929 19:35:12.432046 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"]
Sep 29 19:35:12 crc kubenswrapper[4779]: W0929 19:35:12.437782 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02971c6b_be51_4634_b3a0_661125814bea.slice/crio-197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1 WatchSource:0}: Error finding container 197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1: Status 404 returned error can't find the container with id 197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1
Sep 29 19:35:12 crc kubenswrapper[4779]: I0929 19:35:12.465009 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" event={"ID":"02971c6b-be51-4634-b3a0-661125814bea","Type":"ContainerStarted","Data":"197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1"}
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.056347 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4ww89"]
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.069759 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4ww89"]
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.491452 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" event={"ID":"02971c6b-be51-4634-b3a0-661125814bea","Type":"ContainerStarted","Data":"60f2366a1e9c790783a747a8bf2cb5adf2d842b50865182768724801e762dd4c"}
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.512071 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" podStartSLOduration=1.9684074379999998 podStartE2EDuration="2.512044107s" podCreationTimestamp="2025-09-29 19:35:11 +0000 UTC" firstStartedPulling="2025-09-29 19:35:12.440023567 +0000 UTC m=+1623.324448667" lastFinishedPulling="2025-09-29 19:35:12.983660226 +0000 UTC m=+1623.868085336" observedRunningTime="2025-09-29 19:35:13.510394932 +0000 UTC m=+1624.394820092" watchObservedRunningTime="2025-09-29 19:35:13.512044107 +0000 UTC m=+1624.396469247"
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.779954 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8978916-c429-4c70-8f74-93e9a49a8ae7" path="/var/lib/kubelet/pods/e8978916-c429-4c70-8f74-93e9a49a8ae7/volumes"
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.786833 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:35:13 crc kubenswrapper[4779]: I0929 19:35:13.787144 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:35:14 crc kubenswrapper[4779]: I0929 19:35:14.831696 4779 scope.go:117] "RemoveContainer" containerID="6ec2e0e43972651fadf2f1cda510284be6b29c03dd9312f38660af2b18c13af9"
Sep 29 19:35:14 crc kubenswrapper[4779]: I0929 19:35:14.861343 4779 scope.go:117] "RemoveContainer" containerID="8b630a1b098eec73f5c8b0f2124b002327083b57620a94344709aff52d0af6bf"
Sep 29 19:35:14 crc kubenswrapper[4779]: I0929 19:35:14.929908 4779 scope.go:117] "RemoveContainer" containerID="f5576d4845e19e471e67a51c575cf3f68e5ab6e7ae2249b9d9ebf065fd358162"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.014158 4779 scope.go:117] "RemoveContainer" containerID="b3aa197877ec7e530c30943503880e07fa9de4bc59f8b25788504d3932934546"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.040670 4779 scope.go:117] "RemoveContainer" containerID="71923f0bc15f888e5cd4744b897d266373e28fa132a149eb0d06dd90f977b607"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.082675 4779 scope.go:117] "RemoveContainer" containerID="fc3bff118f4589e6778d2d9ec0a4c0ff2ecd71f1ac6b38bcbfc201f3f939cc45"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.123017 4779 scope.go:117] "RemoveContainer" containerID="d247d11fd42cdb449ad4242ad724d481e72a91c40d19d74e638d5bae1b2ec898"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.167280 4779 scope.go:117] "RemoveContainer" containerID="9e83a75fa8662bc7c160d36dbe67201c1fbd53e867327259fb84c3bbeaab3d39"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.189154 4779 scope.go:117] "RemoveContainer" containerID="5266be70869be4aafd3e27e00724b9889d8f93061c53c946cb9eba201f86e811"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.205914 4779 scope.go:117] "RemoveContainer" containerID="47e4fe6eb9a5a9707e44407bbcccba3eab68529066ae254fbd72f01e3ccb9b40"
Sep 29 19:35:15 crc kubenswrapper[4779]: I0929 19:35:15.223280 4779 scope.go:117] "RemoveContainer" containerID="488253d205161a35bcda5d2176cd378696eb78fba9baf3e497370a51674ba108"
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.042409 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-df0d-account-create-fhcpp"]
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.050449 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f5e-account-create-2n6rt"]
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.060460 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5f48-account-create-vt6p4"]
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.068156 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5f48-account-create-vt6p4"]
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.075294 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-df0d-account-create-fhcpp"]
Sep 29 19:35:36 crc kubenswrapper[4779]: I0929 19:35:36.082743 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5f5e-account-create-2n6rt"]
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.044082 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-xphf4"]
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.057355 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-xphf4"]
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.779844 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f88f50b-5057-4d90-b8d4-fdd4526eaf25" path="/var/lib/kubelet/pods/1f88f50b-5057-4d90-b8d4-fdd4526eaf25/volumes"
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.780659 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f86f4e-1390-4e24-bd8e-2a5cd9899d29" path="/var/lib/kubelet/pods/35f86f4e-1390-4e24-bd8e-2a5cd9899d29/volumes"
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.781276 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe" path="/var/lib/kubelet/pods/48f7f919-2b5f-4954-8aa8-0b0abf3cb4fe/volumes"
Sep 29 19:35:37 crc kubenswrapper[4779]: I0929 19:35:37.781803 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b449307e-0969-471c-84c5-ce1a24b143e3" path="/var/lib/kubelet/pods/b449307e-0969-471c-84c5-ce1a24b143e3/volumes"
Sep 29 19:35:42 crc kubenswrapper[4779]: I0929 19:35:42.044786 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-nrnv2"]
Sep 29 19:35:42 crc kubenswrapper[4779]: I0929 19:35:42.059764 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-nrnv2"]
Sep 29 19:35:43 crc kubenswrapper[4779]: I0929 19:35:43.784734 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a64e434-cdce-47ed-9c44-ab5109920fc7" path="/var/lib/kubelet/pods/2a64e434-cdce-47ed-9c44-ab5109920fc7/volumes"
Sep 29 19:35:43 crc kubenswrapper[4779]: I0929 19:35:43.784860 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:35:43 crc kubenswrapper[4779]: I0929 19:35:43.785763 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:36:03 crc kubenswrapper[4779]: I0929 19:36:03.062550 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-6rvdb"]
Sep 29 19:36:03 crc kubenswrapper[4779]: I0929 19:36:03.078388 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6rvdb"]
Sep 29 19:36:03 crc kubenswrapper[4779]: I0929 19:36:03.779683 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4c19704-2fd1-4d08-a947-f80d1d84f543" path="/var/lib/kubelet/pods/f4c19704-2fd1-4d08-a947-f80d1d84f543/volumes"
Sep 29 19:36:13 crc kubenswrapper[4779]: I0929 19:36:13.785215 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:36:13 crc kubenswrapper[4779]: I0929 19:36:13.785887 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:36:13 crc kubenswrapper[4779]: I0929 19:36:13.785946 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr"
Sep 29 19:36:13 crc kubenswrapper[4779]: I0929 19:36:13.786994 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 19:36:13 crc kubenswrapper[4779]: I0929 19:36:13.787094 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" gracePeriod=600
Sep 29 19:36:13 crc kubenswrapper[4779]: E0929 19:36:13.932847 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 19:36:14 crc kubenswrapper[4779]: I0929 19:36:14.125998 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" exitCode=0
Sep 29 19:36:14 crc kubenswrapper[4779]: I0929 19:36:14.126068 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"}
Sep 29 19:36:14 crc kubenswrapper[4779]: I0929 19:36:14.126384 4779 scope.go:117] "RemoveContainer" containerID="d53b5519458bc3537a9f2faacb6e07e9c914e69c07211264054bfd272bc67ba6"
Sep 29 19:36:14 crc kubenswrapper[4779]: I0929 19:36:14.127434 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
Sep 29 19:36:14 crc kubenswrapper[4779]: E0929 19:36:14.128028 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 19:36:15 crc kubenswrapper[4779]: I0929 19:36:15.453969 4779 scope.go:117] "RemoveContainer" containerID="aeef032f15bbbc9a9afdbe35cf7bcd63a67cdbcb686d8bab9bb709083cb9ae8a"
Sep 29 19:36:15 crc kubenswrapper[4779]: I0929 19:36:15.501020 4779 scope.go:117] "RemoveContainer" containerID="8d670c253599dc0e5bc29861aa4be93aaf4b0e553af70d422a3abb4a91d29125"
Sep 29 19:36:15 crc kubenswrapper[4779]: I0929 19:36:15.531654 4779 scope.go:117] "RemoveContainer" containerID="f5e83d93913e6d73ab31cd72f16ff3e845ff2ee36708c4e494e4182e7f54e320"
Sep 29 19:36:15 crc kubenswrapper[4779]: I0929 19:36:15.583517 4779 scope.go:117] "RemoveContainer" containerID="a6b63f090a525b692b14f054a947c515919a67e98d9fad30671a586cd42e7562"
"RemoveContainer" containerID="c3b5560342e39c7e34e52d23e1dad9c38471e5cd37ab0775116e04fa32954ec5" Sep 29 19:36:15 crc kubenswrapper[4779]: I0929 19:36:15.653957 4779 scope.go:117] "RemoveContainer" containerID="921e448cc9c870a1beb00d75c5b3de36f80f2aa45237cbacb26546bf2fe92438" Sep 29 19:36:19 crc kubenswrapper[4779]: I0929 19:36:19.037186 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-2kkwn"] Sep 29 19:36:19 crc kubenswrapper[4779]: I0929 19:36:19.044569 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-2kkwn"] Sep 29 19:36:19 crc kubenswrapper[4779]: I0929 19:36:19.787476 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0af2a79f-956f-478b-baa6-bc18b1accce9" path="/var/lib/kubelet/pods/0af2a79f-956f-478b-baa6-bc18b1accce9/volumes" Sep 29 19:36:25 crc kubenswrapper[4779]: I0929 19:36:25.240517 4779 generic.go:334] "Generic (PLEG): container finished" podID="02971c6b-be51-4634-b3a0-661125814bea" containerID="60f2366a1e9c790783a747a8bf2cb5adf2d842b50865182768724801e762dd4c" exitCode=0 Sep 29 19:36:25 crc kubenswrapper[4779]: I0929 19:36:25.240632 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" event={"ID":"02971c6b-be51-4634-b3a0-661125814bea","Type":"ContainerDied","Data":"60f2366a1e9c790783a747a8bf2cb5adf2d842b50865182768724801e762dd4c"} Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.048171 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-wnn5c"] Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.065389 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-wnn5c"] Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.668168 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.668168 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd"
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.765895 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
Sep 29 19:36:26 crc kubenswrapper[4779]: E0929 19:36:26.766239 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.829164 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-inventory\") pod \"02971c6b-be51-4634-b3a0-661125814bea\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") "
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.829221 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqhn5\" (UniqueName: \"kubernetes.io/projected/02971c6b-be51-4634-b3a0-661125814bea-kube-api-access-vqhn5\") pod \"02971c6b-be51-4634-b3a0-661125814bea\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") "
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.829405 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key\") pod \"02971c6b-be51-4634-b3a0-661125814bea\" (UID: \"02971c6b-be51-4634-b3a0-661125814bea\") "
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.836514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02971c6b-be51-4634-b3a0-661125814bea-kube-api-access-vqhn5" (OuterVolumeSpecName: "kube-api-access-vqhn5") pod "02971c6b-be51-4634-b3a0-661125814bea" (UID: "02971c6b-be51-4634-b3a0-661125814bea"). InnerVolumeSpecName "kube-api-access-vqhn5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.857973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "02971c6b-be51-4634-b3a0-661125814bea" (UID: "02971c6b-be51-4634-b3a0-661125814bea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.931168 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.931195 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02971c6b-be51-4634-b3a0-661125814bea-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:36:26 crc kubenswrapper[4779]: I0929 19:36:26.931205 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqhn5\" (UniqueName: \"kubernetes.io/projected/02971c6b-be51-4634-b3a0-661125814bea-kube-api-access-vqhn5\") on node \"crc\" DevicePath \"\"" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.266236 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" event={"ID":"02971c6b-be51-4634-b3a0-661125814bea","Type":"ContainerDied","Data":"197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1"} Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.266615 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="197e5abd97088748b7ec8195dfb4a0eef0109ca14fd3289c2ece02746dc6b9d1" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.266398 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c66cd" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.381789 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"] Sep 29 19:36:27 crc kubenswrapper[4779]: E0929 19:36:27.382522 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02971c6b-be51-4634-b3a0-661125814bea" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.382553 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="02971c6b-be51-4634-b3a0-661125814bea" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.382907 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="02971c6b-be51-4634-b3a0-661125814bea" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.384076 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.384076 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.388136 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.388143 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.390707 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.392671 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"]
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.393044 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.542599 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.542985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.543101 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56btq\" (UniqueName: \"kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.644609 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.644682 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56btq\" (UniqueName: \"kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
\"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.650727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.673033 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.673290 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56btq\" (UniqueName: \"kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.704394 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" Sep 29 19:36:27 crc kubenswrapper[4779]: I0929 19:36:27.777649 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a74779-76d8-4fee-bd24-cb11d5d72915" path="/var/lib/kubelet/pods/e2a74779-76d8-4fee-bd24-cb11d5d72915/volumes" Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.031124 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-gxdgx"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.040425 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-ths9f"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.049806 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-gxdgx"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.056935 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-mm5l6"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.063768 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-ths9f"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.072421 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-mm5l6"] Sep 29 19:36:28 crc kubenswrapper[4779]: I0929 19:36:28.287373 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"] Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.283661 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" event={"ID":"2ef530d3-702c-44e3-a066-85a59398fafc","Type":"ContainerStarted","Data":"245e3b60f48c9f76791041e849da24812125b8b32ad162c937b04832c024bf05"} Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.284221 4779 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" event={"ID":"2ef530d3-702c-44e3-a066-85a59398fafc","Type":"ContainerStarted","Data":"886e801ee67d73cd460ba0a7fd6787153e351cf527f4d016790313f1a1536286"} Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.314978 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" podStartSLOduration=1.661096044 podStartE2EDuration="2.314955989s" podCreationTimestamp="2025-09-29 19:36:27 +0000 UTC" firstStartedPulling="2025-09-29 19:36:28.291376625 +0000 UTC m=+1699.175801725" lastFinishedPulling="2025-09-29 19:36:28.94523657 +0000 UTC m=+1699.829661670" observedRunningTime="2025-09-29 19:36:29.30405088 +0000 UTC m=+1700.188476010" watchObservedRunningTime="2025-09-29 19:36:29.314955989 +0000 UTC m=+1700.199381099" Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.802140 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37527e62-544b-42e4-9223-44fe8d4106b2" path="/var/lib/kubelet/pods/37527e62-544b-42e4-9223-44fe8d4106b2/volumes" Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.803266 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8590a86d-3bde-4c3d-8f3f-52de9414caa6" path="/var/lib/kubelet/pods/8590a86d-3bde-4c3d-8f3f-52de9414caa6/volumes" Sep 29 19:36:29 crc kubenswrapper[4779]: I0929 19:36:29.804353 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa13ed58-de9d-465d-aa87-9306284e4f23" path="/var/lib/kubelet/pods/fa13ed58-de9d-465d-aa87-9306284e4f23/volumes" Sep 29 19:36:34 crc kubenswrapper[4779]: I0929 19:36:34.347021 4779 generic.go:334] "Generic (PLEG): container finished" podID="2ef530d3-702c-44e3-a066-85a59398fafc" containerID="245e3b60f48c9f76791041e849da24812125b8b32ad162c937b04832c024bf05" exitCode=0 Sep 29 19:36:34 crc kubenswrapper[4779]: I0929 19:36:34.347133 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" event={"ID":"2ef530d3-702c-44e3-a066-85a59398fafc","Type":"ContainerDied","Data":"245e3b60f48c9f76791041e849da24812125b8b32ad162c937b04832c024bf05"} Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.757259 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.757259 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.814218 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory\") pod \"2ef530d3-702c-44e3-a066-85a59398fafc\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") "
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.814322 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-ssh-key\") pod \"2ef530d3-702c-44e3-a066-85a59398fafc\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") "
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.814781 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56btq\" (UniqueName: \"kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq\") pod \"2ef530d3-702c-44e3-a066-85a59398fafc\" (UID: \"2ef530d3-702c-44e3-a066-85a59398fafc\") "
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.827922 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq" (OuterVolumeSpecName: "kube-api-access-56btq") pod "2ef530d3-702c-44e3-a066-85a59398fafc" (UID: "2ef530d3-702c-44e3-a066-85a59398fafc"). InnerVolumeSpecName "kube-api-access-56btq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.840668 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory" (OuterVolumeSpecName: "inventory") pod "2ef530d3-702c-44e3-a066-85a59398fafc" (UID: "2ef530d3-702c-44e3-a066-85a59398fafc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.863422 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2ef530d3-702c-44e3-a066-85a59398fafc" (UID: "2ef530d3-702c-44e3-a066-85a59398fafc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.916834 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56btq\" (UniqueName: \"kubernetes.io/projected/2ef530d3-702c-44e3-a066-85a59398fafc-kube-api-access-56btq\") on node \"crc\" DevicePath \"\""
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.916881 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 19:36:35 crc kubenswrapper[4779]: I0929 19:36:35.916895 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2ef530d3-702c-44e3-a066-85a59398fafc-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.364995 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt" event={"ID":"2ef530d3-702c-44e3-a066-85a59398fafc","Type":"ContainerDied","Data":"886e801ee67d73cd460ba0a7fd6787153e351cf527f4d016790313f1a1536286"}
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.365045 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="886e801ee67d73cd460ba0a7fd6787153e351cf527f4d016790313f1a1536286"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.365060 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.496919 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"]
Sep 29 19:36:36 crc kubenswrapper[4779]: E0929 19:36:36.497602 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef530d3-702c-44e3-a066-85a59398fafc" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.497650 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef530d3-702c-44e3-a066-85a59398fafc" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.497878 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef530d3-702c-44e3-a066-85a59398fafc" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.498636 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.504646 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.504872 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.504912 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.505065 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.509583 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"]
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.526861 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.527041 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.527213 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7nxn\" (UniqueName: \"kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.628781 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7nxn\" (UniqueName: \"kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.628872 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"
\"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.634421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.634751 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.644864 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7nxn\" (UniqueName: \"kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-lc87v\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:36:36 crc kubenswrapper[4779]: I0929 19:36:36.821607 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:36:37 crc kubenswrapper[4779]: W0929 19:36:37.375515 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0651501f_91fe_410c_9e0c_d1e49760bedd.slice/crio-d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413 WatchSource:0}: Error finding container d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413: Status 404 returned error can't find the container with id d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413 Sep 29 19:36:37 crc kubenswrapper[4779]: I0929 19:36:37.381342 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v"] Sep 29 19:36:38 crc kubenswrapper[4779]: I0929 19:36:38.398264 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" event={"ID":"0651501f-91fe-410c-9e0c-d1e49760bedd","Type":"ContainerStarted","Data":"c7e03052e1f44ba499e47005248048342fbfbcaf2ca1496121602ddeeb4bff3b"} Sep 29 19:36:38 crc kubenswrapper[4779]: I0929 19:36:38.398947 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" event={"ID":"0651501f-91fe-410c-9e0c-d1e49760bedd","Type":"ContainerStarted","Data":"d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413"} Sep 29 19:36:38 crc kubenswrapper[4779]: I0929 19:36:38.429652 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" podStartSLOduration=1.79922945 podStartE2EDuration="2.429624253s" podCreationTimestamp="2025-09-29 19:36:36 +0000 UTC" firstStartedPulling="2025-09-29 19:36:37.377969081 +0000 UTC m=+1708.262394181" lastFinishedPulling="2025-09-29 19:36:38.008363884 +0000 UTC m=+1708.892788984" observedRunningTime="2025-09-29 19:36:38.41892935 +0000 UTC 
Sep 29 19:36:38 crc kubenswrapper[4779]: I0929 19:36:38.429652 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" podStartSLOduration=1.79922945 podStartE2EDuration="2.429624253s" podCreationTimestamp="2025-09-29 19:36:36 +0000 UTC" firstStartedPulling="2025-09-29 19:36:37.377969081 +0000 UTC m=+1708.262394181" lastFinishedPulling="2025-09-29 19:36:38.008363884 +0000 UTC m=+1708.892788984" observedRunningTime="2025-09-29 19:36:38.41892935 +0000 UTC m=+1709.303354450" watchObservedRunningTime="2025-09-29 19:36:38.429624253 +0000 UTC m=+1709.314049383"
Sep 29 19:36:39 crc kubenswrapper[4779]: I0929 19:36:39.772160 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
Sep 29 19:36:39 crc kubenswrapper[4779]: E0929 19:36:39.772750 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.033521 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c17f-account-create-pmqwr"]
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.047010 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ed29-account-create-p2lzw"]
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.057018 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f16-account-create-m6x8h"]
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.067815 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ed29-account-create-p2lzw"]
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.075833 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-c17f-account-create-pmqwr"]
Sep 29 19:36:46 crc kubenswrapper[4779]: I0929 19:36:46.082200 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5f16-account-create-m6x8h"]
Sep 29 19:36:47 crc kubenswrapper[4779]: I0929 19:36:47.778576 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="511539e5-fd9d-4376-ae75-4c308f748af9" path="/var/lib/kubelet/pods/511539e5-fd9d-4376-ae75-4c308f748af9/volumes"
Sep 29 19:36:47 crc kubenswrapper[4779]: I0929 19:36:47.780539 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083" path="/var/lib/kubelet/pods/5e31e631-57a3-4b9b-8d0a-4a2e3cf7d083/volumes"
Sep 29 19:36:47 crc kubenswrapper[4779]: I0929 19:36:47.781030 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="affb28d4-02a0-4da0-9160-14b003815d10" path="/var/lib/kubelet/pods/affb28d4-02a0-4da0-9160-14b003815d10/volumes"
Sep 29 19:36:51 crc kubenswrapper[4779]: I0929 19:36:51.766350 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
Sep 29 19:36:51 crc kubenswrapper[4779]: E0929 19:36:51.766930 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 19:37:02 crc kubenswrapper[4779]: I0929 19:37:02.766566 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:37:13 crc kubenswrapper[4779]: I0929 19:37:13.057193 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z552g"] Sep 29 19:37:13 crc kubenswrapper[4779]: I0929 19:37:13.071802 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z552g"] Sep 29 19:37:13 crc kubenswrapper[4779]: I0929 19:37:13.785414 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ffdc490-1007-41e8-a410-97a78d400395" path="/var/lib/kubelet/pods/1ffdc490-1007-41e8-a410-97a78d400395/volumes" Sep 29 19:37:15 crc kubenswrapper[4779]: I0929 19:37:15.828286 4779 scope.go:117] "RemoveContainer" containerID="db998984beabddce9c56c41f4f1a92332da7d7b9924b4539e86e4742360f18aa" Sep 29 19:37:15 crc kubenswrapper[4779]: I0929 19:37:15.875229 4779 scope.go:117] "RemoveContainer" containerID="104d8711f2e258f7c757a2a97f339867313c4108f16af3aae11a3e03b3d809d5" Sep 29 19:37:15 crc kubenswrapper[4779]: I0929 19:37:15.945397 4779 scope.go:117] "RemoveContainer" containerID="080b50a87bcf31fa21cbd13bfc9fc484d061841d83b28176dad17028cfa30326" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.019903 4779 scope.go:117] "RemoveContainer" containerID="84f09b5bf7a2b46771b260161ea05543adbc390690629f2941bf37e0754f405c" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.062051 4779 scope.go:117] "RemoveContainer" containerID="49fa49fd5f06670c57a43269ae45c6080feadd8d98820d10f268c747299a8ae2" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.123721 4779 scope.go:117] "RemoveContainer" containerID="35f5194d8ae85935a5ffc719458a06390a2604ef60d79e106f9d12797b4e0146" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.166463 4779 scope.go:117] "RemoveContainer" containerID="5c9078ed1682e25224e0006128779813b1b7eb9d196622e03ab4026ee396f725" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.195598 4779 scope.go:117] "RemoveContainer" containerID="add0df3627e6f15e5041d51186e821afb4f3aa74891ee6298afb7b80dcf5c837" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.223853 4779 scope.go:117] "RemoveContainer" containerID="b94e803bb06946a40005468889e5092af1fce22e2c90407285b4489fe644f865" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.767191 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:37:16 crc kubenswrapper[4779]: E0929 19:37:16.767490 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.805416 4779 generic.go:334] "Generic (PLEG): container finished" podID="0651501f-91fe-410c-9e0c-d1e49760bedd" containerID="c7e03052e1f44ba499e47005248048342fbfbcaf2ca1496121602ddeeb4bff3b" exitCode=0 Sep 29 19:37:16 crc kubenswrapper[4779]: I0929 19:37:16.805471 
4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" event={"ID":"0651501f-91fe-410c-9e0c-d1e49760bedd","Type":"ContainerDied","Data":"c7e03052e1f44ba499e47005248048342fbfbcaf2ca1496121602ddeeb4bff3b"} Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.281389 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.375948 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory\") pod \"0651501f-91fe-410c-9e0c-d1e49760bedd\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.376052 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-ssh-key\") pod \"0651501f-91fe-410c-9e0c-d1e49760bedd\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.376186 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7nxn\" (UniqueName: \"kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn\") pod \"0651501f-91fe-410c-9e0c-d1e49760bedd\" (UID: \"0651501f-91fe-410c-9e0c-d1e49760bedd\") " Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.384215 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn" (OuterVolumeSpecName: "kube-api-access-j7nxn") pod "0651501f-91fe-410c-9e0c-d1e49760bedd" (UID: "0651501f-91fe-410c-9e0c-d1e49760bedd"). InnerVolumeSpecName "kube-api-access-j7nxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.411137 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0651501f-91fe-410c-9e0c-d1e49760bedd" (UID: "0651501f-91fe-410c-9e0c-d1e49760bedd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.418804 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory" (OuterVolumeSpecName: "inventory") pod "0651501f-91fe-410c-9e0c-d1e49760bedd" (UID: "0651501f-91fe-410c-9e0c-d1e49760bedd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.478820 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.478875 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0651501f-91fe-410c-9e0c-d1e49760bedd-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.478897 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7nxn\" (UniqueName: \"kubernetes.io/projected/0651501f-91fe-410c-9e0c-d1e49760bedd-kube-api-access-j7nxn\") on node \"crc\" DevicePath \"\"" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.831207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" event={"ID":"0651501f-91fe-410c-9e0c-d1e49760bedd","Type":"ContainerDied","Data":"d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413"} Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.831266 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d851417626e201a497c27c30da2aabe9adacfc480609956e0f24f6c4b30413" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.831363 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-lc87v" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.948886 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6"] Sep 29 19:37:18 crc kubenswrapper[4779]: E0929 19:37:18.949348 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0651501f-91fe-410c-9e0c-d1e49760bedd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.949367 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0651501f-91fe-410c-9e0c-d1e49760bedd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.949571 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0651501f-91fe-410c-9e0c-d1e49760bedd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.950209 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.953419 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.953945 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.954216 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.954346 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:37:18 crc kubenswrapper[4779]: I0929 19:37:18.969650 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6"] Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.094659 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.094747 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.094781 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnbxw\" (UniqueName: \"kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.196732 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.196893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.196956 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnbxw\" (UniqueName: \"kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" 
(UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.203015 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.208121 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.225111 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnbxw\" (UniqueName: \"kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.273488 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:37:19 crc kubenswrapper[4779]: I0929 19:37:19.878440 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6"] Sep 29 19:37:19 crc kubenswrapper[4779]: W0929 19:37:19.878567 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2235a29_5c01_4d29_a4cb_97f0abe8ca63.slice/crio-24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4 WatchSource:0}: Error finding container 24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4: Status 404 returned error can't find the container with id 24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4 Sep 29 19:37:20 crc kubenswrapper[4779]: I0929 19:37:20.853673 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" event={"ID":"e2235a29-5c01-4d29-a4cb-97f0abe8ca63","Type":"ContainerStarted","Data":"5a65d0f3490039a422c427961f0ec867805b8f74bcd076175d5ad4216065c773"} Sep 29 19:37:20 crc kubenswrapper[4779]: I0929 19:37:20.853979 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" event={"ID":"e2235a29-5c01-4d29-a4cb-97f0abe8ca63","Type":"ContainerStarted","Data":"24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4"} Sep 29 19:37:20 crc kubenswrapper[4779]: I0929 19:37:20.874776 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" podStartSLOduration=2.470796617 podStartE2EDuration="2.874758563s" podCreationTimestamp="2025-09-29 19:37:18 +0000 UTC" firstStartedPulling="2025-09-29 19:37:19.880854811 +0000 UTC m=+1750.765279911" lastFinishedPulling="2025-09-29 19:37:20.284816717 +0000 UTC m=+1751.169241857" observedRunningTime="2025-09-29 
19:37:20.872018268 +0000 UTC m=+1751.756443378" watchObservedRunningTime="2025-09-29 19:37:20.874758563 +0000 UTC m=+1751.759183663" Sep 29 19:37:31 crc kubenswrapper[4779]: I0929 19:37:31.766102 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:37:31 crc kubenswrapper[4779]: E0929 19:37:31.767205 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:37:34 crc kubenswrapper[4779]: I0929 19:37:34.046416 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-9jlbk"] Sep 29 19:37:34 crc kubenswrapper[4779]: I0929 19:37:34.056273 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-9jlbk"] Sep 29 19:37:35 crc kubenswrapper[4779]: I0929 19:37:35.785112 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39" path="/var/lib/kubelet/pods/bf8c83fc-5e0d-41f8-b1f6-f7004e73ec39/volumes" Sep 29 19:37:36 crc kubenswrapper[4779]: I0929 19:37:36.038400 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dbzcc"] Sep 29 19:37:36 crc kubenswrapper[4779]: I0929 19:37:36.054852 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dbzcc"] Sep 29 19:37:37 crc kubenswrapper[4779]: I0929 19:37:37.775654 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e3a6b3d-7933-45c0-934c-00846c0783d7" path="/var/lib/kubelet/pods/4e3a6b3d-7933-45c0-934c-00846c0783d7/volumes" Sep 29 19:37:42 crc kubenswrapper[4779]: I0929 19:37:42.766708 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:37:42 crc kubenswrapper[4779]: E0929 19:37:42.767587 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:37:54 crc kubenswrapper[4779]: I0929 19:37:54.767014 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:37:54 crc kubenswrapper[4779]: E0929 19:37:54.768360 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:37:55 crc kubenswrapper[4779]: I0929 19:37:55.029273 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-97n2m"] Sep 29 19:37:55 crc kubenswrapper[4779]: I0929 19:37:55.036500 4779 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-97n2m"] Sep 29 19:37:55 crc kubenswrapper[4779]: I0929 19:37:55.779283 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0792c39-e57a-412f-96b3-c40605b6b146" path="/var/lib/kubelet/pods/b0792c39-e57a-412f-96b3-c40605b6b146/volumes" Sep 29 19:38:05 crc kubenswrapper[4779]: I0929 19:38:05.767218 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:38:05 crc kubenswrapper[4779]: E0929 19:38:05.768748 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:38:16 crc kubenswrapper[4779]: I0929 19:38:16.442027 4779 scope.go:117] "RemoveContainer" containerID="926b73d08bda76eac4969ce958a86ccd0d9bc085a58523b570bb045b65784955" Sep 29 19:38:16 crc kubenswrapper[4779]: I0929 19:38:16.505193 4779 scope.go:117] "RemoveContainer" containerID="ecb99029c6153550ca912aa5f524f76a477af64b2ac01c3ad77b4beb39652991" Sep 29 19:38:16 crc kubenswrapper[4779]: I0929 19:38:16.565806 4779 scope.go:117] "RemoveContainer" containerID="6aa05722e5d057c8fcc215edfab704dadea3447805c2496ee9ee2f263c29dedc" Sep 29 19:38:18 crc kubenswrapper[4779]: I0929 19:38:18.461593 4779 generic.go:334] "Generic (PLEG): container finished" podID="e2235a29-5c01-4d29-a4cb-97f0abe8ca63" containerID="5a65d0f3490039a422c427961f0ec867805b8f74bcd076175d5ad4216065c773" exitCode=0 Sep 29 19:38:18 crc kubenswrapper[4779]: I0929 19:38:18.461683 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" event={"ID":"e2235a29-5c01-4d29-a4cb-97f0abe8ca63","Type":"ContainerDied","Data":"5a65d0f3490039a422c427961f0ec867805b8f74bcd076175d5ad4216065c773"} Sep 29 19:38:18 crc kubenswrapper[4779]: I0929 19:38:18.766239 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:38:18 crc kubenswrapper[4779]: E0929 19:38:18.766767 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:38:19 crc kubenswrapper[4779]: I0929 19:38:19.929847 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.055126 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key\") pod \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.055798 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory\") pod \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.055857 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnbxw\" (UniqueName: \"kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw\") pod \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\" (UID: \"e2235a29-5c01-4d29-a4cb-97f0abe8ca63\") " Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.061097 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw" (OuterVolumeSpecName: "kube-api-access-fnbxw") pod "e2235a29-5c01-4d29-a4cb-97f0abe8ca63" (UID: "e2235a29-5c01-4d29-a4cb-97f0abe8ca63"). InnerVolumeSpecName "kube-api-access-fnbxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.090067 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory" (OuterVolumeSpecName: "inventory") pod "e2235a29-5c01-4d29-a4cb-97f0abe8ca63" (UID: "e2235a29-5c01-4d29-a4cb-97f0abe8ca63"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.103826 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e2235a29-5c01-4d29-a4cb-97f0abe8ca63" (UID: "e2235a29-5c01-4d29-a4cb-97f0abe8ca63"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.161905 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.162247 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnbxw\" (UniqueName: \"kubernetes.io/projected/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-kube-api-access-fnbxw\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.162435 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2235a29-5c01-4d29-a4cb-97f0abe8ca63-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.482220 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" event={"ID":"e2235a29-5c01-4d29-a4cb-97f0abe8ca63","Type":"ContainerDied","Data":"24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4"} Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.482267 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24aeab13d4bebb35081c8f571e0623250589e50c2e34d26388c4a0c8f30d12b4" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.482391 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.578641 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nrsx2"] Sep 29 19:38:20 crc kubenswrapper[4779]: E0929 19:38:20.579272 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2235a29-5c01-4d29-a4cb-97f0abe8ca63" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.579302 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2235a29-5c01-4d29-a4cb-97f0abe8ca63" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.579658 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2235a29-5c01-4d29-a4cb-97f0abe8ca63" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.580870 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.583217 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.584297 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.584435 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.586280 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.588404 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nrsx2"] Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.673436 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.673559 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25rcs\" (UniqueName: \"kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.673775 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.775834 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.775919 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25rcs\" (UniqueName: \"kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.775985 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc 
kubenswrapper[4779]: I0929 19:38:20.780914 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.781778 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.797284 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25rcs\" (UniqueName: \"kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs\") pod \"ssh-known-hosts-edpm-deployment-nrsx2\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:20 crc kubenswrapper[4779]: I0929 19:38:20.902264 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:21 crc kubenswrapper[4779]: I0929 19:38:21.467402 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nrsx2"] Sep 29 19:38:21 crc kubenswrapper[4779]: I0929 19:38:21.508648 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" event={"ID":"a075ace8-eeae-4fa5-9353-72e217e82dfd","Type":"ContainerStarted","Data":"d8f211ce09781b92a170ab20555335782baee547667edd085cda2e0df2f1da5f"} Sep 29 19:38:22 crc kubenswrapper[4779]: I0929 19:38:22.518655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" event={"ID":"a075ace8-eeae-4fa5-9353-72e217e82dfd","Type":"ContainerStarted","Data":"cc439bff33f01bce26783e58dd73781df5a93658038ae0027569ebe6673a917d"} Sep 29 19:38:22 crc kubenswrapper[4779]: I0929 19:38:22.548364 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" podStartSLOduration=2.026816 podStartE2EDuration="2.548340804s" podCreationTimestamp="2025-09-29 19:38:20 +0000 UTC" firstStartedPulling="2025-09-29 19:38:21.478614137 +0000 UTC m=+1812.363039237" lastFinishedPulling="2025-09-29 19:38:22.000138911 +0000 UTC m=+1812.884564041" observedRunningTime="2025-09-29 19:38:22.539469611 +0000 UTC m=+1813.423894711" watchObservedRunningTime="2025-09-29 19:38:22.548340804 +0000 UTC m=+1813.432765894" Sep 29 19:38:29 crc kubenswrapper[4779]: I0929 19:38:29.600023 4779 generic.go:334] "Generic (PLEG): container finished" podID="a075ace8-eeae-4fa5-9353-72e217e82dfd" containerID="cc439bff33f01bce26783e58dd73781df5a93658038ae0027569ebe6673a917d" exitCode=0 Sep 29 19:38:29 crc kubenswrapper[4779]: I0929 19:38:29.600132 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" event={"ID":"a075ace8-eeae-4fa5-9353-72e217e82dfd","Type":"ContainerDied","Data":"cc439bff33f01bce26783e58dd73781df5a93658038ae0027569ebe6673a917d"} Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.057532 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.190629 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0\") pod \"a075ace8-eeae-4fa5-9353-72e217e82dfd\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.190859 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam\") pod \"a075ace8-eeae-4fa5-9353-72e217e82dfd\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.190996 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25rcs\" (UniqueName: \"kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs\") pod \"a075ace8-eeae-4fa5-9353-72e217e82dfd\" (UID: \"a075ace8-eeae-4fa5-9353-72e217e82dfd\") " Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.196248 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs" (OuterVolumeSpecName: "kube-api-access-25rcs") pod "a075ace8-eeae-4fa5-9353-72e217e82dfd" (UID: "a075ace8-eeae-4fa5-9353-72e217e82dfd"). InnerVolumeSpecName "kube-api-access-25rcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.236655 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "a075ace8-eeae-4fa5-9353-72e217e82dfd" (UID: "a075ace8-eeae-4fa5-9353-72e217e82dfd"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.239016 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a075ace8-eeae-4fa5-9353-72e217e82dfd" (UID: "a075ace8-eeae-4fa5-9353-72e217e82dfd"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.293431 4779 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.293474 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a075ace8-eeae-4fa5-9353-72e217e82dfd-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.293490 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25rcs\" (UniqueName: \"kubernetes.io/projected/a075ace8-eeae-4fa5-9353-72e217e82dfd-kube-api-access-25rcs\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.624432 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.624951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nrsx2" event={"ID":"a075ace8-eeae-4fa5-9353-72e217e82dfd","Type":"ContainerDied","Data":"d8f211ce09781b92a170ab20555335782baee547667edd085cda2e0df2f1da5f"} Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.625007 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8f211ce09781b92a170ab20555335782baee547667edd085cda2e0df2f1da5f" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.730282 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9"] Sep 29 19:38:31 crc kubenswrapper[4779]: E0929 19:38:31.731092 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a075ace8-eeae-4fa5-9353-72e217e82dfd" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.731139 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a075ace8-eeae-4fa5-9353-72e217e82dfd" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.731648 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a075ace8-eeae-4fa5-9353-72e217e82dfd" containerName="ssh-known-hosts-edpm-deployment" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.733011 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.736125 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.737071 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.737132 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.737256 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.743128 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9"] Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.803506 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsnvs\" (UniqueName: \"kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.803874 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.804017 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.905850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsnvs\" (UniqueName: \"kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.905926 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.906087 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.911035 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.913535 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:31 crc kubenswrapper[4779]: I0929 19:38:31.932233 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsnvs\" (UniqueName: \"kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6j4j9\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:32 crc kubenswrapper[4779]: I0929 19:38:32.053141 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:32 crc kubenswrapper[4779]: I0929 19:38:32.560965 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9"] Sep 29 19:38:32 crc kubenswrapper[4779]: W0929 19:38:32.568121 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaceb0e20_3731_4018_947e_40f2193b8c0a.slice/crio-1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b WatchSource:0}: Error finding container 1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b: Status 404 returned error can't find the container with id 1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b Sep 29 19:38:32 crc kubenswrapper[4779]: I0929 19:38:32.634195 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" event={"ID":"aceb0e20-3731-4018-947e-40f2193b8c0a","Type":"ContainerStarted","Data":"1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b"} Sep 29 19:38:32 crc kubenswrapper[4779]: I0929 19:38:32.766887 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:38:32 crc kubenswrapper[4779]: E0929 19:38:32.767394 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:38:33 crc kubenswrapper[4779]: I0929 19:38:33.645572 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" event={"ID":"aceb0e20-3731-4018-947e-40f2193b8c0a","Type":"ContainerStarted","Data":"d59c04f1e9b31a2923dbbd4aea4b826667d8538c853b11bf2b7ee9948b3c2afd"} Sep 29 19:38:33 crc kubenswrapper[4779]: I0929 19:38:33.677249 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" podStartSLOduration=2.185818727 podStartE2EDuration="2.677226466s" podCreationTimestamp="2025-09-29 19:38:31 +0000 UTC" firstStartedPulling="2025-09-29 19:38:32.571681278 +0000 UTC m=+1823.456106378" lastFinishedPulling="2025-09-29 19:38:33.063089017 +0000 UTC m=+1823.947514117" observedRunningTime="2025-09-29 19:38:33.663924522 +0000 UTC m=+1824.548349632" watchObservedRunningTime="2025-09-29 19:38:33.677226466 +0000 UTC m=+1824.561651576" Sep 29 19:38:41 crc kubenswrapper[4779]: I0929 19:38:41.732423 4779 generic.go:334] "Generic (PLEG): container finished" podID="aceb0e20-3731-4018-947e-40f2193b8c0a" containerID="d59c04f1e9b31a2923dbbd4aea4b826667d8538c853b11bf2b7ee9948b3c2afd" exitCode=0 Sep 29 19:38:41 crc kubenswrapper[4779]: I0929 19:38:41.732515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" event={"ID":"aceb0e20-3731-4018-947e-40f2193b8c0a","Type":"ContainerDied","Data":"d59c04f1e9b31a2923dbbd4aea4b826667d8538c853b11bf2b7ee9948b3c2afd"} Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.221253 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.339605 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory\") pod \"aceb0e20-3731-4018-947e-40f2193b8c0a\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.339940 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key\") pod \"aceb0e20-3731-4018-947e-40f2193b8c0a\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.340263 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsnvs\" (UniqueName: \"kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs\") pod \"aceb0e20-3731-4018-947e-40f2193b8c0a\" (UID: \"aceb0e20-3731-4018-947e-40f2193b8c0a\") " Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.344777 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs" (OuterVolumeSpecName: "kube-api-access-gsnvs") pod "aceb0e20-3731-4018-947e-40f2193b8c0a" (UID: "aceb0e20-3731-4018-947e-40f2193b8c0a"). InnerVolumeSpecName "kube-api-access-gsnvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.368550 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aceb0e20-3731-4018-947e-40f2193b8c0a" (UID: "aceb0e20-3731-4018-947e-40f2193b8c0a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.385355 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory" (OuterVolumeSpecName: "inventory") pod "aceb0e20-3731-4018-947e-40f2193b8c0a" (UID: "aceb0e20-3731-4018-947e-40f2193b8c0a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.442492 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsnvs\" (UniqueName: \"kubernetes.io/projected/aceb0e20-3731-4018-947e-40f2193b8c0a-kube-api-access-gsnvs\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.442529 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.442539 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aceb0e20-3731-4018-947e-40f2193b8c0a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.767253 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.790920 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6j4j9" event={"ID":"aceb0e20-3731-4018-947e-40f2193b8c0a","Type":"ContainerDied","Data":"1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b"} Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.790989 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1779236e3566182ebdb818d3b34ed21810c4a7ade43f3da531f1b4de2d58c60b" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.831504 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524"] Sep 29 19:38:43 crc kubenswrapper[4779]: E0929 19:38:43.832013 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aceb0e20-3731-4018-947e-40f2193b8c0a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.832037 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="aceb0e20-3731-4018-947e-40f2193b8c0a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.832256 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="aceb0e20-3731-4018-947e-40f2193b8c0a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.832955 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.840041 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.840097 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.840233 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524"] Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.840978 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.842063 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.951169 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lrhz\" (UniqueName: \"kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:43 crc kubenswrapper[4779]: I0929 19:38:43.951418 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:43 crc 
kubenswrapper[4779]: I0929 19:38:43.951679 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.053310 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lrhz\" (UniqueName: \"kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.053472 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.053579 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.058718 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.063105 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.072284 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lrhz\" (UniqueName: \"kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9k524\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.176167 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.727165 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524"] Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.731196 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.767841 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:38:44 crc kubenswrapper[4779]: E0929 19:38:44.768267 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:38:44 crc kubenswrapper[4779]: I0929 19:38:44.775656 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" event={"ID":"501fc953-417c-4abf-aafc-9cc25c3ecb23","Type":"ContainerStarted","Data":"88e850c4fa3a22e69a46d36d1e1dc0c6491d04289cf828566588ae955487ddf8"} Sep 29 19:38:45 crc kubenswrapper[4779]: I0929 19:38:45.785149 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" event={"ID":"501fc953-417c-4abf-aafc-9cc25c3ecb23","Type":"ContainerStarted","Data":"885aea4dcee4ef8d10be886d20017f817a7e817c775f71b520fa7d1649a2e569"} Sep 29 19:38:45 crc kubenswrapper[4779]: I0929 19:38:45.810069 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" podStartSLOduration=2.205794325 podStartE2EDuration="2.810041982s" podCreationTimestamp="2025-09-29 19:38:43 +0000 UTC" firstStartedPulling="2025-09-29 19:38:44.730773785 +0000 UTC m=+1835.615198925" lastFinishedPulling="2025-09-29 19:38:45.335021452 +0000 UTC m=+1836.219446582" observedRunningTime="2025-09-29 19:38:45.805252521 +0000 UTC m=+1836.689677631" watchObservedRunningTime="2025-09-29 19:38:45.810041982 +0000 UTC m=+1836.694467112" Sep 29 19:38:55 crc kubenswrapper[4779]: I0929 19:38:55.875826 4779 generic.go:334] "Generic (PLEG): container finished" podID="501fc953-417c-4abf-aafc-9cc25c3ecb23" containerID="885aea4dcee4ef8d10be886d20017f817a7e817c775f71b520fa7d1649a2e569" exitCode=0 Sep 29 19:38:55 crc kubenswrapper[4779]: I0929 19:38:55.875936 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" event={"ID":"501fc953-417c-4abf-aafc-9cc25c3ecb23","Type":"ContainerDied","Data":"885aea4dcee4ef8d10be886d20017f817a7e817c775f71b520fa7d1649a2e569"} Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.397532 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.531975 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key\") pod \"501fc953-417c-4abf-aafc-9cc25c3ecb23\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.532095 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lrhz\" (UniqueName: \"kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz\") pod \"501fc953-417c-4abf-aafc-9cc25c3ecb23\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.532139 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory\") pod \"501fc953-417c-4abf-aafc-9cc25c3ecb23\" (UID: \"501fc953-417c-4abf-aafc-9cc25c3ecb23\") " Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.552829 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz" (OuterVolumeSpecName: "kube-api-access-5lrhz") pod "501fc953-417c-4abf-aafc-9cc25c3ecb23" (UID: "501fc953-417c-4abf-aafc-9cc25c3ecb23"). InnerVolumeSpecName "kube-api-access-5lrhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.561092 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "501fc953-417c-4abf-aafc-9cc25c3ecb23" (UID: "501fc953-417c-4abf-aafc-9cc25c3ecb23"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.561925 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory" (OuterVolumeSpecName: "inventory") pod "501fc953-417c-4abf-aafc-9cc25c3ecb23" (UID: "501fc953-417c-4abf-aafc-9cc25c3ecb23"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.635902 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.636004 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lrhz\" (UniqueName: \"kubernetes.io/projected/501fc953-417c-4abf-aafc-9cc25c3ecb23-kube-api-access-5lrhz\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.636022 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/501fc953-417c-4abf-aafc-9cc25c3ecb23-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.902564 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" event={"ID":"501fc953-417c-4abf-aafc-9cc25c3ecb23","Type":"ContainerDied","Data":"88e850c4fa3a22e69a46d36d1e1dc0c6491d04289cf828566588ae955487ddf8"} Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.902617 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88e850c4fa3a22e69a46d36d1e1dc0c6491d04289cf828566588ae955487ddf8" Sep 29 19:38:57 crc kubenswrapper[4779]: I0929 19:38:57.902654 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9k524" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.009905 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg"] Sep 29 19:38:58 crc kubenswrapper[4779]: E0929 19:38:58.010368 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="501fc953-417c-4abf-aafc-9cc25c3ecb23" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.010389 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="501fc953-417c-4abf-aafc-9cc25c3ecb23" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.010607 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="501fc953-417c-4abf-aafc-9cc25c3ecb23" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.011383 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.014448 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.014547 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.014799 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.016532 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.022264 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.022519 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.022925 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.024010 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.030920 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg"] Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146098 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146152 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146305 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146406 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146458 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146539 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146565 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146601 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146659 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzc59\" (UniqueName: 
\"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146689 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146741 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.146784 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.247960 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248032 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248076 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248111 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc 
kubenswrapper[4779]: I0929 19:38:58.248180 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248276 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248350 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248415 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248474 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248502 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzc59\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.248533 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.252765 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.253546 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.254200 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.254756 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.254916 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.255424 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.256252 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.257282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.257514 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.259607 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.259830 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.259982 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.260497 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.272190 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzc59\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.335477 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.868062 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg"] Sep 29 19:38:58 crc kubenswrapper[4779]: I0929 19:38:58.912512 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" event={"ID":"d00741d2-40ff-4d5c-b697-cc4ac7ed7511","Type":"ContainerStarted","Data":"8ac867818747502b518068ba091f82cf90ccaa95526e4e5ddf57ea6327082905"} Sep 29 19:38:59 crc kubenswrapper[4779]: I0929 19:38:59.771789 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:38:59 crc kubenswrapper[4779]: E0929 19:38:59.772465 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:38:59 crc kubenswrapper[4779]: I0929 19:38:59.925337 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" event={"ID":"d00741d2-40ff-4d5c-b697-cc4ac7ed7511","Type":"ContainerStarted","Data":"8d00ef08ba1da6c60b022bdd469785fbcf4986fa21870b688f90b5c9073bb373"} Sep 29 19:38:59 crc kubenswrapper[4779]: I0929 19:38:59.950853 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" podStartSLOduration=2.530464409 podStartE2EDuration="2.950830724s" podCreationTimestamp="2025-09-29 19:38:57 +0000 UTC" firstStartedPulling="2025-09-29 19:38:58.87827706 +0000 UTC m=+1849.762702160" lastFinishedPulling="2025-09-29 19:38:59.298643375 +0000 UTC m=+1850.183068475" observedRunningTime="2025-09-29 19:38:59.948280264 +0000 UTC m=+1850.832705364" watchObservedRunningTime="2025-09-29 19:38:59.950830724 +0000 UTC m=+1850.835255834" Sep 29 19:39:14 crc kubenswrapper[4779]: I0929 19:39:14.766815 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:39:14 crc kubenswrapper[4779]: E0929 19:39:14.767921 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:39:28 crc kubenswrapper[4779]: I0929 19:39:28.767284 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:39:28 crc kubenswrapper[4779]: E0929 19:39:28.768661 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:39:40 crc kubenswrapper[4779]: I0929 19:39:40.766124 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:39:40 crc kubenswrapper[4779]: E0929 19:39:40.766786 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:39:41 crc kubenswrapper[4779]: I0929 19:39:41.339820 4779 generic.go:334] "Generic (PLEG): container finished" podID="d00741d2-40ff-4d5c-b697-cc4ac7ed7511" containerID="8d00ef08ba1da6c60b022bdd469785fbcf4986fa21870b688f90b5c9073bb373" exitCode=0 Sep 29 19:39:41 crc kubenswrapper[4779]: I0929 19:39:41.339871 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" event={"ID":"d00741d2-40ff-4d5c-b697-cc4ac7ed7511","Type":"ContainerDied","Data":"8d00ef08ba1da6c60b022bdd469785fbcf4986fa21870b688f90b5c9073bb373"} Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.787719 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885544 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885597 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzc59\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885614 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885671 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885708 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885746 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885830 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885859 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.885875 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc 
kubenswrapper[4779]: I0929 19:39:42.885999 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.886754 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.886778 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.886798 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.886834 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key\") pod \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\" (UID: \"d00741d2-40ff-4d5c-b697-cc4ac7ed7511\") " Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.893416 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59" (OuterVolumeSpecName: "kube-api-access-dzc59") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "kube-api-access-dzc59". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.893481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.893570 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.894079 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.894265 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.895194 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.896899 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.904356 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.904788 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.906186 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.907178 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.917534 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.932286 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.938490 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory" (OuterVolumeSpecName: "inventory") pod "d00741d2-40ff-4d5c-b697-cc4ac7ed7511" (UID: "d00741d2-40ff-4d5c-b697-cc4ac7ed7511"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989749 4779 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989809 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989829 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989849 4779 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989871 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989891 4779 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989909 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989929 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989949 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzc59\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-kube-api-access-dzc59\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989967 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.989984 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.990002 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-ovn-combined-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.990020 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:42 crc kubenswrapper[4779]: I0929 19:39:42.990059 4779 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00741d2-40ff-4d5c-b697-cc4ac7ed7511-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.365476 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" event={"ID":"d00741d2-40ff-4d5c-b697-cc4ac7ed7511","Type":"ContainerDied","Data":"8ac867818747502b518068ba091f82cf90ccaa95526e4e5ddf57ea6327082905"} Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.365522 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ac867818747502b518068ba091f82cf90ccaa95526e4e5ddf57ea6327082905" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.365595 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.524984 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9"] Sep 29 19:39:43 crc kubenswrapper[4779]: E0929 19:39:43.525553 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d00741d2-40ff-4d5c-b697-cc4ac7ed7511" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.525576 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d00741d2-40ff-4d5c-b697-cc4ac7ed7511" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.525745 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d00741d2-40ff-4d5c-b697-cc4ac7ed7511" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.526356 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.529722 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.529759 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.529808 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.529804 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.533215 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.537392 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9"] Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.602423 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.602530 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.602562 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68lgq\" (UniqueName: \"kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.602616 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.602634 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.704628 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.704696 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.704772 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.704877 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.704911 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68lgq\" (UniqueName: \"kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.705807 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.710275 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.716000 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.727868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.743342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68lgq\" (UniqueName: \"kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-626r9\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:43 crc kubenswrapper[4779]: I0929 19:39:43.851492 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:39:44 crc kubenswrapper[4779]: I0929 19:39:44.399553 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9"] Sep 29 19:39:45 crc kubenswrapper[4779]: I0929 19:39:45.387012 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" event={"ID":"0b9b5a28-3a66-4041-a143-8c8a40b27ef4","Type":"ContainerStarted","Data":"63b49d27300e9e8ee980306d43445a4e994591af1e07f262ea6570c506859245"} Sep 29 19:39:45 crc kubenswrapper[4779]: I0929 19:39:45.387374 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" event={"ID":"0b9b5a28-3a66-4041-a143-8c8a40b27ef4","Type":"ContainerStarted","Data":"ef19d4d4b0bc359d7c6ddfd179c8ae18338b566f0bc80a7527772fb436de5aa1"} Sep 29 19:39:45 crc kubenswrapper[4779]: I0929 19:39:45.407257 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" podStartSLOduration=1.890985409 podStartE2EDuration="2.407217257s" podCreationTimestamp="2025-09-29 19:39:43 +0000 UTC" firstStartedPulling="2025-09-29 19:39:44.39668811 +0000 UTC m=+1895.281113210" lastFinishedPulling="2025-09-29 19:39:44.912919948 +0000 UTC m=+1895.797345058" observedRunningTime="2025-09-29 19:39:45.404550614 +0000 UTC m=+1896.288975724" watchObservedRunningTime="2025-09-29 19:39:45.407217257 +0000 UTC m=+1896.291642367" Sep 29 19:39:53 crc kubenswrapper[4779]: I0929 19:39:53.766441 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:39:53 crc kubenswrapper[4779]: E0929 19:39:53.767496 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:40:04 crc kubenswrapper[4779]: I0929 19:40:04.766294 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:40:04 crc kubenswrapper[4779]: E0929 19:40:04.767303 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:40:17 crc 
kubenswrapper[4779]: I0929 19:40:17.766848 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:40:17 crc kubenswrapper[4779]: E0929 19:40:17.771786 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:40:32 crc kubenswrapper[4779]: I0929 19:40:32.766343 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:40:32 crc kubenswrapper[4779]: E0929 19:40:32.767308 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:40:44 crc kubenswrapper[4779]: I0929 19:40:44.766725 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:40:44 crc kubenswrapper[4779]: E0929 19:40:44.767586 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:40:53 crc kubenswrapper[4779]: I0929 19:40:53.062665 4779 generic.go:334] "Generic (PLEG): container finished" podID="0b9b5a28-3a66-4041-a143-8c8a40b27ef4" containerID="63b49d27300e9e8ee980306d43445a4e994591af1e07f262ea6570c506859245" exitCode=0 Sep 29 19:40:53 crc kubenswrapper[4779]: I0929 19:40:53.062740 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" event={"ID":"0b9b5a28-3a66-4041-a143-8c8a40b27ef4","Type":"ContainerDied","Data":"63b49d27300e9e8ee980306d43445a4e994591af1e07f262ea6570c506859245"} Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.505502 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.593090 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory\") pod \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.593166 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68lgq\" (UniqueName: \"kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq\") pod \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.593270 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle\") pod \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.593357 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key\") pod \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.593389 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0\") pod \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\" (UID: \"0b9b5a28-3a66-4041-a143-8c8a40b27ef4\") " Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.599069 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0b9b5a28-3a66-4041-a143-8c8a40b27ef4" (UID: "0b9b5a28-3a66-4041-a143-8c8a40b27ef4"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.600114 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq" (OuterVolumeSpecName: "kube-api-access-68lgq") pod "0b9b5a28-3a66-4041-a143-8c8a40b27ef4" (UID: "0b9b5a28-3a66-4041-a143-8c8a40b27ef4"). InnerVolumeSpecName "kube-api-access-68lgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.619501 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory" (OuterVolumeSpecName: "inventory") pod "0b9b5a28-3a66-4041-a143-8c8a40b27ef4" (UID: "0b9b5a28-3a66-4041-a143-8c8a40b27ef4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.625127 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0b9b5a28-3a66-4041-a143-8c8a40b27ef4" (UID: "0b9b5a28-3a66-4041-a143-8c8a40b27ef4"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.647168 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0b9b5a28-3a66-4041-a143-8c8a40b27ef4" (UID: "0b9b5a28-3a66-4041-a143-8c8a40b27ef4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.695684 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.695729 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68lgq\" (UniqueName: \"kubernetes.io/projected/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-kube-api-access-68lgq\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.695745 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.695756 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:54 crc kubenswrapper[4779]: I0929 19:40:54.695768 4779 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0b9b5a28-3a66-4041-a143-8c8a40b27ef4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.086037 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" event={"ID":"0b9b5a28-3a66-4041-a143-8c8a40b27ef4","Type":"ContainerDied","Data":"ef19d4d4b0bc359d7c6ddfd179c8ae18338b566f0bc80a7527772fb436de5aa1"} Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.086088 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-626r9" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.086102 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef19d4d4b0bc359d7c6ddfd179c8ae18338b566f0bc80a7527772fb436de5aa1" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.197059 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j"] Sep 29 19:40:55 crc kubenswrapper[4779]: E0929 19:40:55.197906 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b9b5a28-3a66-4041-a143-8c8a40b27ef4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.197928 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b9b5a28-3a66-4041-a143-8c8a40b27ef4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.198155 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b9b5a28-3a66-4041-a143-8c8a40b27ef4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.199002 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.201594 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.201614 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.201827 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.201938 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.207428 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j"] Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.208631 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.208864 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.307049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.307126 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.307345 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.307600 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.307902 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.308056 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpf6x\" (UniqueName: \"kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409605 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409690 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409759 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" 
Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409793 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpf6x\" (UniqueName: \"kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409842 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.409863 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.416558 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.420087 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.420172 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.420356 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.420567 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.427930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpf6x\" (UniqueName: \"kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:55 crc kubenswrapper[4779]: I0929 19:40:55.519189 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:40:56 crc kubenswrapper[4779]: I0929 19:40:56.084825 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j"] Sep 29 19:40:56 crc kubenswrapper[4779]: I0929 19:40:56.107621 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" event={"ID":"b17f52c4-7329-4262-87d2-d5ef94e88f28","Type":"ContainerStarted","Data":"d504701a3b9fdbc44c4e645cc371c19f33a5df53d682d89b3cd46919129b0761"} Sep 29 19:40:57 crc kubenswrapper[4779]: I0929 19:40:57.122048 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" event={"ID":"b17f52c4-7329-4262-87d2-d5ef94e88f28","Type":"ContainerStarted","Data":"580d79135c2518faf2648c5af3cd8ed334b8f0f25657fac9c90828ca863f935f"} Sep 29 19:40:57 crc kubenswrapper[4779]: I0929 19:40:57.156002 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" podStartSLOduration=1.561109651 podStartE2EDuration="2.155972412s" podCreationTimestamp="2025-09-29 19:40:55 +0000 UTC" firstStartedPulling="2025-09-29 19:40:56.094137851 +0000 UTC m=+1966.978562961" lastFinishedPulling="2025-09-29 19:40:56.689000612 +0000 UTC m=+1967.573425722" observedRunningTime="2025-09-29 19:40:57.14640025 +0000 UTC m=+1968.030825380" watchObservedRunningTime="2025-09-29 19:40:57.155972412 +0000 UTC m=+1968.040397542" Sep 29 19:40:57 crc kubenswrapper[4779]: I0929 19:40:57.770559 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:40:57 crc kubenswrapper[4779]: E0929 19:40:57.771174 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:41:08 crc kubenswrapper[4779]: I0929 19:41:08.766794 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:41:08 crc kubenswrapper[4779]: E0929 19:41:08.768048 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:41:19 crc kubenswrapper[4779]: I0929 19:41:19.772805 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb" Sep 29 19:41:20 crc kubenswrapper[4779]: I0929 19:41:20.357655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e"} Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.164298 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"] Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.167750 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.181887 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"] Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.340757 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvtbr\" (UniqueName: \"kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.340899 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.340998 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.443208 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvtbr\" (UniqueName: \"kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.443327 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.443443 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " 
pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.443980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.444044 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.475294 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvtbr\" (UniqueName: \"kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr\") pod \"redhat-marketplace-ts8gh\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") " pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.484971 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.652363 4779 generic.go:334] "Generic (PLEG): container finished" podID="b17f52c4-7329-4262-87d2-d5ef94e88f28" containerID="580d79135c2518faf2648c5af3cd8ed334b8f0f25657fac9c90828ca863f935f" exitCode=0 Sep 29 19:41:48 crc kubenswrapper[4779]: I0929 19:41:48.652409 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" event={"ID":"b17f52c4-7329-4262-87d2-d5ef94e88f28","Type":"ContainerDied","Data":"580d79135c2518faf2648c5af3cd8ed334b8f0f25657fac9c90828ca863f935f"} Sep 29 19:41:49 crc kubenswrapper[4779]: I0929 19:41:49.074450 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"] Sep 29 19:41:49 crc kubenswrapper[4779]: I0929 19:41:49.665239 4779 generic.go:334] "Generic (PLEG): container finished" podID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerID="863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41" exitCode=0 Sep 29 19:41:49 crc kubenswrapper[4779]: I0929 19:41:49.665302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerDied","Data":"863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41"} Sep 29 19:41:49 crc kubenswrapper[4779]: I0929 19:41:49.665370 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerStarted","Data":"a6375fdfb2b6087369a293b68ba821856a99c87f3098b94c7aed1e046c3a7b29"} Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.103852 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.278189 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.278261 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.278277 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.278360 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.279082 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.279147 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpf6x\" (UniqueName: \"kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x\") pod \"b17f52c4-7329-4262-87d2-d5ef94e88f28\" (UID: \"b17f52c4-7329-4262-87d2-d5ef94e88f28\") " Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.283778 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.283787 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x" (OuterVolumeSpecName: "kube-api-access-mpf6x") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "kube-api-access-mpf6x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.313399 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.323828 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.330017 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory" (OuterVolumeSpecName: "inventory") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.334720 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "b17f52c4-7329-4262-87d2-d5ef94e88f28" (UID: "b17f52c4-7329-4262-87d2-d5ef94e88f28"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380770 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380804 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpf6x\" (UniqueName: \"kubernetes.io/projected/b17f52c4-7329-4262-87d2-d5ef94e88f28-kube-api-access-mpf6x\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380814 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380823 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380832 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.380842 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b17f52c4-7329-4262-87d2-d5ef94e88f28-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.673915 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" event={"ID":"b17f52c4-7329-4262-87d2-d5ef94e88f28","Type":"ContainerDied","Data":"d504701a3b9fdbc44c4e645cc371c19f33a5df53d682d89b3cd46919129b0761"} Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.673954 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d504701a3b9fdbc44c4e645cc371c19f33a5df53d682d89b3cd46919129b0761" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.673975 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.817051 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z"] Sep 29 19:41:50 crc kubenswrapper[4779]: E0929 19:41:50.817822 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b17f52c4-7329-4262-87d2-d5ef94e88f28" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.817847 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b17f52c4-7329-4262-87d2-d5ef94e88f28" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.818087 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b17f52c4-7329-4262-87d2-d5ef94e88f28" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.818835 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.822787 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.822936 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.822986 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.823650 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.824392 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.827592 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z"] Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.892602 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.892679 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcmtr\" (UniqueName: \"kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.892796 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.892819 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.892943 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.994486 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.994551 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.994618 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.994708 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:50 crc kubenswrapper[4779]: I0929 19:41:50.994790 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcmtr\" (UniqueName: \"kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.000828 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.001410 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.001957 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.003341 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.012797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcmtr\" (UniqueName: \"kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.191900 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.572111 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z"] Sep 29 19:41:51 crc kubenswrapper[4779]: W0929 19:41:51.576607 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff61060_93f6_4bd6_a6f9_75195322a8d2.slice/crio-bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873 WatchSource:0}: Error finding container bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873: Status 404 returned error can't find the container with id bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873 Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.684307 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" event={"ID":"7ff61060-93f6-4bd6-a6f9-75195322a8d2","Type":"ContainerStarted","Data":"bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873"} Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.686164 4779 generic.go:334] "Generic (PLEG): container finished" podID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerID="cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8" exitCode=0 Sep 29 19:41:51 crc kubenswrapper[4779]: I0929 19:41:51.686203 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerDied","Data":"cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8"} Sep 29 19:41:52 crc kubenswrapper[4779]: I0929 19:41:52.704154 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" event={"ID":"7ff61060-93f6-4bd6-a6f9-75195322a8d2","Type":"ContainerStarted","Data":"6e1e8d619b02fff62b076ca07fa7c977da40e3f70d5a18de585946b87e10b666"} Sep 29 19:41:52 crc kubenswrapper[4779]: I0929 19:41:52.712549 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerStarted","Data":"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"} Sep 29 19:41:52 crc kubenswrapper[4779]: I0929 19:41:52.738549 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" podStartSLOduration=2.1804330690000002 podStartE2EDuration="2.738533485s" podCreationTimestamp="2025-09-29 19:41:50 +0000 UTC" firstStartedPulling="2025-09-29 19:41:51.579278827 +0000 UTC m=+2022.463703927" lastFinishedPulling="2025-09-29 
19:41:52.137379203 +0000 UTC m=+2023.021804343" observedRunningTime="2025-09-29 19:41:52.737763824 +0000 UTC m=+2023.622188934" watchObservedRunningTime="2025-09-29 19:41:52.738533485 +0000 UTC m=+2023.622958585" Sep 29 19:41:52 crc kubenswrapper[4779]: I0929 19:41:52.764261 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ts8gh" podStartSLOduration=2.131294343 podStartE2EDuration="4.764238088s" podCreationTimestamp="2025-09-29 19:41:48 +0000 UTC" firstStartedPulling="2025-09-29 19:41:49.669092796 +0000 UTC m=+2020.553517916" lastFinishedPulling="2025-09-29 19:41:52.302036551 +0000 UTC m=+2023.186461661" observedRunningTime="2025-09-29 19:41:52.758267664 +0000 UTC m=+2023.642692754" watchObservedRunningTime="2025-09-29 19:41:52.764238088 +0000 UTC m=+2023.648663188" Sep 29 19:41:58 crc kubenswrapper[4779]: I0929 19:41:58.485161 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:58 crc kubenswrapper[4779]: I0929 19:41:58.485788 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:58 crc kubenswrapper[4779]: I0929 19:41:58.551589 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:58 crc kubenswrapper[4779]: I0929 19:41:58.850532 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ts8gh" Sep 29 19:41:58 crc kubenswrapper[4779]: I0929 19:41:58.911711 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"] Sep 29 19:42:00 crc kubenswrapper[4779]: I0929 19:42:00.802606 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ts8gh" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="registry-server" containerID="cri-o://900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c" gracePeriod=2 Sep 29 19:42:01 crc kubenswrapper[4779]: E0929 19:42:01.016653 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39a4c9e9_4cf9_481d_a085_02193b956a68.slice/crio-900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.213582 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.213582 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ts8gh"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.326773 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities\") pod \"39a4c9e9-4cf9-481d-a085-02193b956a68\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") "
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.327692 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content\") pod \"39a4c9e9-4cf9-481d-a085-02193b956a68\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") "
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.327713 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities" (OuterVolumeSpecName: "utilities") pod "39a4c9e9-4cf9-481d-a085-02193b956a68" (UID: "39a4c9e9-4cf9-481d-a085-02193b956a68"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.327768 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvtbr\" (UniqueName: \"kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr\") pod \"39a4c9e9-4cf9-481d-a085-02193b956a68\" (UID: \"39a4c9e9-4cf9-481d-a085-02193b956a68\") "
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.328585 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.335728 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr" (OuterVolumeSpecName: "kube-api-access-tvtbr") pod "39a4c9e9-4cf9-481d-a085-02193b956a68" (UID: "39a4c9e9-4cf9-481d-a085-02193b956a68"). InnerVolumeSpecName "kube-api-access-tvtbr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.340654 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39a4c9e9-4cf9-481d-a085-02193b956a68" (UID: "39a4c9e9-4cf9-481d-a085-02193b956a68"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.430156 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39a4c9e9-4cf9-481d-a085-02193b956a68-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.430197 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvtbr\" (UniqueName: \"kubernetes.io/projected/39a4c9e9-4cf9-481d-a085-02193b956a68-kube-api-access-tvtbr\") on node \"crc\" DevicePath \"\""
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.818243 4779 generic.go:334] "Generic (PLEG): container finished" podID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerID="900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c" exitCode=0
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.818414 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerDied","Data":"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"}
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.818548 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ts8gh"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.818619 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ts8gh" event={"ID":"39a4c9e9-4cf9-481d-a085-02193b956a68","Type":"ContainerDied","Data":"a6375fdfb2b6087369a293b68ba821856a99c87f3098b94c7aed1e046c3a7b29"}
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.818655 4779 scope.go:117] "RemoveContainer" containerID="900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.850000 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"]
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.859499 4779 scope.go:117] "RemoveContainer" containerID="cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.862310 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ts8gh"]
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.883742 4779 scope.go:117] "RemoveContainer" containerID="863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.924725 4779 scope.go:117] "RemoveContainer" containerID="900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"
Sep 29 19:42:01 crc kubenswrapper[4779]: E0929 19:42:01.925397 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c\": container with ID starting with 900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c not found: ID does not exist" containerID="900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.925467 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c"} err="failed to get container status \"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c\": rpc error: code = NotFound desc = could not find container \"900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c\": container with ID starting with 900d33a84d3ef188b8ea9aa5dadf7b427003533db1c594d67e5f3919b182b05c not found: ID does not exist"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.925500 4779 scope.go:117] "RemoveContainer" containerID="cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8"
Sep 29 19:42:01 crc kubenswrapper[4779]: E0929 19:42:01.925995 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8\": container with ID starting with cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8 not found: ID does not exist" containerID="cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.926029 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8"} err="failed to get container status \"cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8\": rpc error: code = NotFound desc = could not find container \"cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8\": container with ID starting with cf2bdab29b0f51344b92b4e81b49ccc2f7b3a901db8b13b2cb5ab75ef7eec4d8 not found: ID does not exist"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.926117 4779 scope.go:117] "RemoveContainer" containerID="863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41"
Sep 29 19:42:01 crc kubenswrapper[4779]: E0929 19:42:01.926501 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41\": container with ID starting with 863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41 not found: ID does not exist" containerID="863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41"
Sep 29 19:42:01 crc kubenswrapper[4779]: I0929 19:42:01.926538 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41"} err="failed to get container status \"863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41\": rpc error: code = NotFound desc = could not find container \"863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41\": container with ID starting with 863bcc7d8934f0b490929839c11580f23121d6bf117b60aa74190eb5d90d4e41 not found: ID does not exist"
Sep 29 19:42:03 crc kubenswrapper[4779]: I0929 19:42:03.785484 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" path="/var/lib/kubelet/pods/39a4c9e9-4cf9-481d-a085-02193b956a68/volumes"
pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.600885 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"] Sep 29 19:43:48 crc kubenswrapper[4779]: E0929 19:43:48.601713 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="registry-server" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.601732 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="registry-server" Sep 29 19:43:48 crc kubenswrapper[4779]: E0929 19:43:48.601758 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="extract-content" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.601766 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="extract-content" Sep 29 19:43:48 crc kubenswrapper[4779]: E0929 19:43:48.601805 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="extract-utilities" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.601814 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="extract-utilities" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.602486 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="39a4c9e9-4cf9-481d-a085-02193b956a68" containerName="registry-server" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.604224 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.625534 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"] Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.704695 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lxhd\" (UniqueName: \"kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.705004 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.705044 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.806340 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.806598 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.806680 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lxhd\" (UniqueName: \"kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.806799 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.807098 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.827953 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5lxhd\" (UniqueName: \"kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd\") pod \"certified-operators-z6cxt\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") " pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:48 crc kubenswrapper[4779]: I0929 19:43:48.941394 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6cxt" Sep 29 19:43:49 crc kubenswrapper[4779]: I0929 19:43:49.445818 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"] Sep 29 19:43:50 crc kubenswrapper[4779]: I0929 19:43:50.091993 4779 generic.go:334] "Generic (PLEG): container finished" podID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerID="e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010" exitCode=0 Sep 29 19:43:50 crc kubenswrapper[4779]: I0929 19:43:50.092128 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerDied","Data":"e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010"} Sep 29 19:43:50 crc kubenswrapper[4779]: I0929 19:43:50.092768 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerStarted","Data":"b6bd72e96f6fcc7a3a1bb67018eeceb3ac724159233444359ab997af3a3cb852"} Sep 29 19:43:50 crc kubenswrapper[4779]: I0929 19:43:50.095063 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:43:52 crc kubenswrapper[4779]: I0929 19:43:52.119568 4779 generic.go:334] "Generic (PLEG): container finished" podID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerID="cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2" exitCode=0 Sep 29 19:43:52 crc kubenswrapper[4779]: I0929 19:43:52.119807 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerDied","Data":"cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2"} Sep 29 19:43:53 crc kubenswrapper[4779]: I0929 19:43:53.131651 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerStarted","Data":"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"} Sep 29 19:43:53 crc kubenswrapper[4779]: I0929 19:43:53.160580 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z6cxt" podStartSLOduration=2.661042071 podStartE2EDuration="5.160555622s" podCreationTimestamp="2025-09-29 19:43:48 +0000 UTC" firstStartedPulling="2025-09-29 19:43:50.094435702 +0000 UTC m=+2140.978860832" lastFinishedPulling="2025-09-29 19:43:52.593949253 +0000 UTC m=+2143.478374383" observedRunningTime="2025-09-29 19:43:53.155104533 +0000 UTC m=+2144.039529673" watchObservedRunningTime="2025-09-29 19:43:53.160555622 +0000 UTC m=+2144.044980742" Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.397701 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"] Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.400093 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.400093 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.412796 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"]
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.437205 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.437383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.437433 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bvvq\" (UniqueName: \"kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.539688 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.539812 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.539856 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bvvq\" (UniqueName: \"kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.540815 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.541121 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.564458 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bvvq\" (UniqueName: \"kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq\") pod \"redhat-operators-jdzg7\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") " pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:54 crc kubenswrapper[4779]: I0929 19:43:54.744470 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:43:55 crc kubenswrapper[4779]: I0929 19:43:55.211659 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"]
Sep 29 19:43:56 crc kubenswrapper[4779]: I0929 19:43:56.160287 4779 generic.go:334] "Generic (PLEG): container finished" podID="e77b6344-311f-4a32-b02e-ef1860c07144" containerID="002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64" exitCode=0
Sep 29 19:43:56 crc kubenswrapper[4779]: I0929 19:43:56.160382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerDied","Data":"002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64"}
Sep 29 19:43:56 crc kubenswrapper[4779]: I0929 19:43:56.160423 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerStarted","Data":"6985df948d35d8e480f38b0c3c13d926d6eb86c8d40f95a217342ed04e2d84a1"}
Sep 29 19:43:58 crc kubenswrapper[4779]: I0929 19:43:58.187571 4779 generic.go:334] "Generic (PLEG): container finished" podID="e77b6344-311f-4a32-b02e-ef1860c07144" containerID="972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37" exitCode=0
Sep 29 19:43:58 crc kubenswrapper[4779]: I0929 19:43:58.187863 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerDied","Data":"972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37"}
Sep 29 19:43:58 crc kubenswrapper[4779]: I0929 19:43:58.942571 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:43:58 crc kubenswrapper[4779]: I0929 19:43:58.942945 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:43:59 crc kubenswrapper[4779]: I0929 19:43:59.012781 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:43:59 crc kubenswrapper[4779]: I0929 19:43:59.200075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerStarted","Data":"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"}
Sep 29 19:43:59 crc kubenswrapper[4779]: I0929 19:43:59.238929 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jdzg7" podStartSLOduration=2.7098775699999997 podStartE2EDuration="5.238905717s" podCreationTimestamp="2025-09-29 19:43:54 +0000 UTC" firstStartedPulling="2025-09-29 19:43:56.162575259 +0000 UTC m=+2147.047000359" lastFinishedPulling="2025-09-29 19:43:58.691603366 +0000 UTC m=+2149.576028506" observedRunningTime="2025-09-29 19:43:59.229534481 +0000 UTC m=+2150.113959621" watchObservedRunningTime="2025-09-29 19:43:59.238905717 +0000 UTC m=+2150.123330857"
Sep 29 19:43:59 crc kubenswrapper[4779]: I0929 19:43:59.281962 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.383383 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"]
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.383859 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z6cxt" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerName="registry-server" containerID="cri-o://7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5" gracePeriod=2
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.861613 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.922913 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lxhd\" (UniqueName: \"kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd\") pod \"223c1224-2bba-409f-8e83-0052eaaf2caa\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") "
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.923036 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities\") pod \"223c1224-2bba-409f-8e83-0052eaaf2caa\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") "
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.923061 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content\") pod \"223c1224-2bba-409f-8e83-0052eaaf2caa\" (UID: \"223c1224-2bba-409f-8e83-0052eaaf2caa\") "
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.923982 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities" (OuterVolumeSpecName: "utilities") pod "223c1224-2bba-409f-8e83-0052eaaf2caa" (UID: "223c1224-2bba-409f-8e83-0052eaaf2caa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.930475 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd" (OuterVolumeSpecName: "kube-api-access-5lxhd") pod "223c1224-2bba-409f-8e83-0052eaaf2caa" (UID: "223c1224-2bba-409f-8e83-0052eaaf2caa"). InnerVolumeSpecName "kube-api-access-5lxhd". PluginName "kubernetes.io/projected", VolumeGidValue ""
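
Note: "Killing container with a grace period ... gracePeriod=2" above is the standard term-then-kill escalation: the runtime delivers SIGTERM, waits up to the grace period, then falls back to SIGKILL. A simplified illustration (assumed direct process handle; the kubelet actually drives this through the CRI):

package containerkill

import (
	"os"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to grace for the process to exit,
// then escalates to SIGKILL, mirroring gracePeriod=2 in the log above.
func killWithGrace(proc *os.Process, grace time.Duration) error {
	if err := proc.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() {
		_, err := proc.Wait() // only valid for child processes; illustration only
		done <- err
	}()
	select {
	case err := <-done:
		return err // exited on its own within the grace period
	case <-time.After(grace):
		return proc.Kill() // deadline passed: SIGKILL
	}
}
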
Sep 29 19:44:01 crc kubenswrapper[4779]: I0929 19:44:01.980431 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "223c1224-2bba-409f-8e83-0052eaaf2caa" (UID: "223c1224-2bba-409f-8e83-0052eaaf2caa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.025585 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.025623 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223c1224-2bba-409f-8e83-0052eaaf2caa-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.025636 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lxhd\" (UniqueName: \"kubernetes.io/projected/223c1224-2bba-409f-8e83-0052eaaf2caa-kube-api-access-5lxhd\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.235541 4779 generic.go:334] "Generic (PLEG): container finished" podID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerID="7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5" exitCode=0
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.235615 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerDied","Data":"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"}
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.235716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6cxt" event={"ID":"223c1224-2bba-409f-8e83-0052eaaf2caa","Type":"ContainerDied","Data":"b6bd72e96f6fcc7a3a1bb67018eeceb3ac724159233444359ab997af3a3cb852"}
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.235761 4779 scope.go:117] "RemoveContainer" containerID="7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.235643 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6cxt"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.275628 4779 scope.go:117] "RemoveContainer" containerID="cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.292205 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"]
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.301793 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z6cxt"]
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.325368 4779 scope.go:117] "RemoveContainer" containerID="e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.362692 4779 scope.go:117] "RemoveContainer" containerID="7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"
Sep 29 19:44:02 crc kubenswrapper[4779]: E0929 19:44:02.363166 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5\": container with ID starting with 7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5 not found: ID does not exist" containerID="7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.363227 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5"} err="failed to get container status \"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5\": rpc error: code = NotFound desc = could not find container \"7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5\": container with ID starting with 7c76440abf311a4c5da779d73ed16ac1bdab2af7b53a33a0e4bf114936f189f5 not found: ID does not exist"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.363267 4779 scope.go:117] "RemoveContainer" containerID="cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2"
Sep 29 19:44:02 crc kubenswrapper[4779]: E0929 19:44:02.363901 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2\": container with ID starting with cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2 not found: ID does not exist" containerID="cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.363931 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2"} err="failed to get container status \"cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2\": rpc error: code = NotFound desc = could not find container \"cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2\": container with ID starting with cd3db3ac52fdf217798616a5f59af15507d730bd63666e1fd299290aca1efda2 not found: ID does not exist"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.363952 4779 scope.go:117] "RemoveContainer" containerID="e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010"
Sep 29 19:44:02 crc kubenswrapper[4779]: E0929 19:44:02.364624 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010\": container with ID starting with e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010 not found: ID does not exist" containerID="e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010"
Sep 29 19:44:02 crc kubenswrapper[4779]: I0929 19:44:02.364670 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010"} err="failed to get container status \"e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010\": rpc error: code = NotFound desc = could not find container \"e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010\": container with ID starting with e3dd220437c15ffbd3cc4ad0f7d4a56706a7a3b5183d89c35d325862a68be010 not found: ID does not exist"
Sep 29 19:44:03 crc kubenswrapper[4779]: I0929 19:44:03.790267 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" path="/var/lib/kubelet/pods/223c1224-2bba-409f-8e83-0052eaaf2caa/volumes"
Sep 29 19:44:04 crc kubenswrapper[4779]: I0929 19:44:04.745201 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:04 crc kubenswrapper[4779]: I0929 19:44:04.745266 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:04 crc kubenswrapper[4779]: I0929 19:44:04.831639 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:05 crc kubenswrapper[4779]: I0929 19:44:05.319511 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:05 crc kubenswrapper[4779]: I0929 19:44:05.995700 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"]
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.293162 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jdzg7" podUID="e77b6344-311f-4a32-b02e-ef1860c07144" containerName="registry-server" containerID="cri-o://4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae" gracePeriod=2
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.745403 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.843142 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bvvq\" (UniqueName: \"kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq\") pod \"e77b6344-311f-4a32-b02e-ef1860c07144\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") "
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.843340 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities\") pod \"e77b6344-311f-4a32-b02e-ef1860c07144\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") "
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.843371 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") pod \"e77b6344-311f-4a32-b02e-ef1860c07144\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") "
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.844329 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities" (OuterVolumeSpecName: "utilities") pod "e77b6344-311f-4a32-b02e-ef1860c07144" (UID: "e77b6344-311f-4a32-b02e-ef1860c07144"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.850547 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq" (OuterVolumeSpecName: "kube-api-access-4bvvq") pod "e77b6344-311f-4a32-b02e-ef1860c07144" (UID: "e77b6344-311f-4a32-b02e-ef1860c07144"). InnerVolumeSpecName "kube-api-access-4bvvq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.945639 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e77b6344-311f-4a32-b02e-ef1860c07144" (UID: "e77b6344-311f-4a32-b02e-ef1860c07144"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.946125 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") pod \"e77b6344-311f-4a32-b02e-ef1860c07144\" (UID: \"e77b6344-311f-4a32-b02e-ef1860c07144\") "
Sep 29 19:44:07 crc kubenswrapper[4779]: W0929 19:44:07.946257 4779 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/e77b6344-311f-4a32-b02e-ef1860c07144/volumes/kubernetes.io~empty-dir/catalog-content
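
Note: the empty_dir.go:500 warning above ("Unmount skipped because path does not exist") shows the volume manager retrying a TearDown that had already completed; the retry succeeds by treating the missing path as already-unmounted. A sketch of that idempotent teardown check (assumed helper, not the actual empty-dir plugin):

package volumeutil

import (
	"errors"
	"io/fs"
	"os"
)

// tearDownAt unmounts path, but treats "path already gone" as success so
// a repeated TearDown (as in the log above) cannot fail the reconciler.
func tearDownAt(path string, unmount func(string) error) error {
	if _, err := os.Stat(path); errors.Is(err, fs.ErrNotExist) {
		return nil // already cleaned up by a previous TearDown
	}
	return unmount(path)
}
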
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.946268 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e77b6344-311f-4a32-b02e-ef1860c07144" (UID: "e77b6344-311f-4a32-b02e-ef1860c07144"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.946601 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.946620 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e77b6344-311f-4a32-b02e-ef1860c07144-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:07 crc kubenswrapper[4779]: I0929 19:44:07.946632 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bvvq\" (UniqueName: \"kubernetes.io/projected/e77b6344-311f-4a32-b02e-ef1860c07144-kube-api-access-4bvvq\") on node \"crc\" DevicePath \"\""
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.307219 4779 generic.go:334] "Generic (PLEG): container finished" podID="e77b6344-311f-4a32-b02e-ef1860c07144" containerID="4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae" exitCode=0
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.307267 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerDied","Data":"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"}
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.307298 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzg7" event={"ID":"e77b6344-311f-4a32-b02e-ef1860c07144","Type":"ContainerDied","Data":"6985df948d35d8e480f38b0c3c13d926d6eb86c8d40f95a217342ed04e2d84a1"}
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.307311 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzg7"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.307324 4779 scope.go:117] "RemoveContainer" containerID="4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.356582 4779 scope.go:117] "RemoveContainer" containerID="972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.361661 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"]
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.373492 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jdzg7"]
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.386166 4779 scope.go:117] "RemoveContainer" containerID="002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.458276 4779 scope.go:117] "RemoveContainer" containerID="4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"
Sep 29 19:44:08 crc kubenswrapper[4779]: E0929 19:44:08.458831 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae\": container with ID starting with 4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae not found: ID does not exist" containerID="4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.458874 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae"} err="failed to get container status \"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae\": rpc error: code = NotFound desc = could not find container \"4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae\": container with ID starting with 4fc37563efc0a70044cba4af5fadaff356138c6d3e9b777350389e90ab2b50ae not found: ID does not exist"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.458895 4779 scope.go:117] "RemoveContainer" containerID="972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37"
Sep 29 19:44:08 crc kubenswrapper[4779]: E0929 19:44:08.459428 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37\": container with ID starting with 972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37 not found: ID does not exist" containerID="972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.459486 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37"} err="failed to get container status \"972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37\": rpc error: code = NotFound desc = could not find container \"972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37\": container with ID starting with 972f09f967c13050232b9cfc7d7f842733a291828f534160945e483ccedfdd37 not found: ID does not exist"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.459526 4779 scope.go:117] "RemoveContainer" containerID="002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64"
Sep 29 19:44:08 crc kubenswrapper[4779]: E0929 19:44:08.459965 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64\": container with ID starting with 002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64 not found: ID does not exist" containerID="002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64"
Sep 29 19:44:08 crc kubenswrapper[4779]: I0929 19:44:08.459991 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64"} err="failed to get container status \"002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64\": rpc error: code = NotFound desc = could not find container \"002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64\": container with ID starting with 002320e8fdc0ecd14e603d0137c3f75046d59fa1dfcd36faf70874a815689f64 not found: ID does not exist"
Sep 29 19:44:09 crc kubenswrapper[4779]: I0929 19:44:09.786156 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e77b6344-311f-4a32-b02e-ef1860c07144" path="/var/lib/kubelet/pods/e77b6344-311f-4a32-b02e-ef1860c07144/volumes"
Sep 29 19:44:13 crc kubenswrapper[4779]: I0929 19:44:13.785248 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:44:13 crc kubenswrapper[4779]: I0929 19:44:13.785897 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:44:43 crc kubenswrapper[4779]: I0929 19:44:43.785284 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:44:43 crc kubenswrapper[4779]: I0929 19:44:43.787605 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:44:43 crc kubenswrapper[4779]: I0929 19:44:43.787685 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr"
Sep 29 19:44:43 crc kubenswrapper[4779]: I0929 19:44:43.788728 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 19:44:43 crc kubenswrapper[4779]: I0929 19:44:43.788801 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e" gracePeriod=600
Sep 29 19:44:44 crc kubenswrapper[4779]: I0929 19:44:44.702361 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e" exitCode=0
Sep 29 19:44:44 crc kubenswrapper[4779]: I0929 19:44:44.702443 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e"}
Sep 29 19:44:44 crc kubenswrapper[4779]: I0929 19:44:44.703359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c"}
Sep 29 19:44:44 crc kubenswrapper[4779]: I0929 19:44:44.703430 4779 scope.go:117] "RemoveContainer" containerID="cabb38360f260f953132a0f1382a2873fbc40a1355bf1109d5e07b29c41a2fbb"
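
Note: the sequence above is the liveness-restart path end to end: repeated "connection refused" failures against http://127.0.0.1:8798/health, the probe worker marking the container unhealthy, the kubelet killing it with the pod's termination grace period (600s here), and the PLEG reporting the replacement container started. A minimal probe-loop sketch (assumed threshold and period; the real prober is configured per container):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	const (
		endpoint         = "http://127.0.0.1:8798/health" // endpoint from the log
		failureThreshold = 3
		period           = 10 * time.Second
	)
	failures := 0
	for {
		resp, err := http.Get(endpoint)
		if err == nil {
			resp.Body.Close()
		}
		if err != nil || resp.StatusCode >= 400 {
			failures++ // "connection refused" lands here
		} else {
			failures = 0
		}
		if failures >= failureThreshold {
			fmt.Println("liveness unhealthy: kill container and restart")
			return
		}
		time.Sleep(period)
	}
}
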
assignment" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerName="registry-server" Sep 29 19:44:53 crc kubenswrapper[4779]: E0929 19:44:53.397411 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerName="extract-content" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.397418 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerName="extract-content" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.397698 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77b6344-311f-4a32-b02e-ef1860c07144" containerName="registry-server" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.397710 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="223c1224-2bba-409f-8e83-0052eaaf2caa" containerName="registry-server" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.399652 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.424388 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gnhlh"] Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.524202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm752\" (UniqueName: \"kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.524413 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.524597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.626440 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.626531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.626636 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm752\" (UniqueName: 
\"kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.627078 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.627180 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.651142 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm752\" (UniqueName: \"kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752\") pod \"community-operators-gnhlh\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:53 crc kubenswrapper[4779]: I0929 19:44:53.721954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:44:54 crc kubenswrapper[4779]: I0929 19:44:54.260275 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gnhlh"] Sep 29 19:44:54 crc kubenswrapper[4779]: I0929 19:44:54.832695 4779 generic.go:334] "Generic (PLEG): container finished" podID="eb3116a2-20c2-4963-b567-82e7623ec967" containerID="8c91fb2c84dae407794234f9fe2c6ce755dede6695b8a76f64baae0c92c80083" exitCode=0 Sep 29 19:44:54 crc kubenswrapper[4779]: I0929 19:44:54.832750 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerDied","Data":"8c91fb2c84dae407794234f9fe2c6ce755dede6695b8a76f64baae0c92c80083"} Sep 29 19:44:54 crc kubenswrapper[4779]: I0929 19:44:54.832810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerStarted","Data":"cdddc17d4a7624a60e5d354a6c8ed87b13ef39b3260493c1dcece79c698e5bcd"} Sep 29 19:44:55 crc kubenswrapper[4779]: I0929 19:44:55.843209 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerStarted","Data":"335ad8e4d5822c3dc857afb7e1e3c4bd2d6f526db19cb380d2c0c2c3999b4ca4"} Sep 29 19:44:56 crc kubenswrapper[4779]: I0929 19:44:56.858763 4779 generic.go:334] "Generic (PLEG): container finished" podID="eb3116a2-20c2-4963-b567-82e7623ec967" containerID="335ad8e4d5822c3dc857afb7e1e3c4bd2d6f526db19cb380d2c0c2c3999b4ca4" exitCode=0 Sep 29 19:44:56 crc kubenswrapper[4779]: I0929 19:44:56.858839 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" 
event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerDied","Data":"335ad8e4d5822c3dc857afb7e1e3c4bd2d6f526db19cb380d2c0c2c3999b4ca4"} Sep 29 19:44:57 crc kubenswrapper[4779]: I0929 19:44:57.870200 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerStarted","Data":"29a54190155cebf42479d3f9a73f87ec36fb2212b242674b157b275947c003cf"} Sep 29 19:44:57 crc kubenswrapper[4779]: I0929 19:44:57.899265 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gnhlh" podStartSLOduration=2.312701744 podStartE2EDuration="4.899249253s" podCreationTimestamp="2025-09-29 19:44:53 +0000 UTC" firstStartedPulling="2025-09-29 19:44:54.834873851 +0000 UTC m=+2205.719298951" lastFinishedPulling="2025-09-29 19:44:57.42142133 +0000 UTC m=+2208.305846460" observedRunningTime="2025-09-29 19:44:57.893396353 +0000 UTC m=+2208.777821493" watchObservedRunningTime="2025-09-29 19:44:57.899249253 +0000 UTC m=+2208.783674353" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.142626 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd"] Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.144041 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.147588 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.151334 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.176677 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd"] Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.280446 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.280557 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.280709 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5j6t\" (UniqueName: \"kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.383123 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.383333 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5j6t\" (UniqueName: \"kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.383534 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.383980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.392746 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.402069 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5j6t\" (UniqueName: \"kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t\") pod \"collect-profiles-29319585-z9hnd\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:00 crc kubenswrapper[4779]: I0929 19:45:00.472637 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:01 crc kubenswrapper[4779]: I0929 19:45:01.015846 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd"] Sep 29 19:45:01 crc kubenswrapper[4779]: W0929 19:45:01.024506 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6258606_2aee_4ffe_89f2_e4ed3850017c.slice/crio-29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457 WatchSource:0}: Error finding container 29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457: Status 404 returned error can't find the container with id 29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457 Sep 29 19:45:01 crc kubenswrapper[4779]: I0929 19:45:01.925670 4779 generic.go:334] "Generic (PLEG): container finished" podID="d6258606-2aee-4ffe-89f2-e4ed3850017c" containerID="fad6cd94a3b4b22ba60a3df2425c587bbaaec7885ea63b2196f097b8876b860c" exitCode=0 Sep 29 19:45:01 crc kubenswrapper[4779]: I0929 19:45:01.925752 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" event={"ID":"d6258606-2aee-4ffe-89f2-e4ed3850017c","Type":"ContainerDied","Data":"fad6cd94a3b4b22ba60a3df2425c587bbaaec7885ea63b2196f097b8876b860c"} Sep 29 19:45:01 crc kubenswrapper[4779]: I0929 19:45:01.926032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" event={"ID":"d6258606-2aee-4ffe-89f2-e4ed3850017c","Type":"ContainerStarted","Data":"29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457"} Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.305347 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.438328 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume\") pod \"d6258606-2aee-4ffe-89f2-e4ed3850017c\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.438438 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5j6t\" (UniqueName: \"kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t\") pod \"d6258606-2aee-4ffe-89f2-e4ed3850017c\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.438485 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume\") pod \"d6258606-2aee-4ffe-89f2-e4ed3850017c\" (UID: \"d6258606-2aee-4ffe-89f2-e4ed3850017c\") " Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.439631 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume" (OuterVolumeSpecName: "config-volume") pod "d6258606-2aee-4ffe-89f2-e4ed3850017c" (UID: "d6258606-2aee-4ffe-89f2-e4ed3850017c"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.444369 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t" (OuterVolumeSpecName: "kube-api-access-s5j6t") pod "d6258606-2aee-4ffe-89f2-e4ed3850017c" (UID: "d6258606-2aee-4ffe-89f2-e4ed3850017c"). InnerVolumeSpecName "kube-api-access-s5j6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.446149 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d6258606-2aee-4ffe-89f2-e4ed3850017c" (UID: "d6258606-2aee-4ffe-89f2-e4ed3850017c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.540788 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6258606-2aee-4ffe-89f2-e4ed3850017c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.540822 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5j6t\" (UniqueName: \"kubernetes.io/projected/d6258606-2aee-4ffe-89f2-e4ed3850017c-kube-api-access-s5j6t\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.540834 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6258606-2aee-4ffe-89f2-e4ed3850017c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.722973 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.723033 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.789165 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.954702 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" event={"ID":"d6258606-2aee-4ffe-89f2-e4ed3850017c","Type":"ContainerDied","Data":"29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457"} Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.954770 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29accfcf4b99b85f8fb8f8ceba7ee0fffd2c4d787352a84688acf8a2569ca457" Sep 29 19:45:03 crc kubenswrapper[4779]: I0929 19:45:03.954820 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319585-z9hnd" Sep 29 19:45:04 crc kubenswrapper[4779]: I0929 19:45:04.029533 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:04 crc kubenswrapper[4779]: I0929 19:45:04.099149 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gnhlh"] Sep 29 19:45:04 crc kubenswrapper[4779]: I0929 19:45:04.406506 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"] Sep 29 19:45:04 crc kubenswrapper[4779]: I0929 19:45:04.421630 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319540-c6vdh"] Sep 29 19:45:05 crc kubenswrapper[4779]: I0929 19:45:05.780868 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d999e79-a467-4f19-a67a-f5993c6b4423" path="/var/lib/kubelet/pods/2d999e79-a467-4f19-a67a-f5993c6b4423/volumes" Sep 29 19:45:05 crc kubenswrapper[4779]: I0929 19:45:05.984529 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gnhlh" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="registry-server" containerID="cri-o://29a54190155cebf42479d3f9a73f87ec36fb2212b242674b157b275947c003cf" gracePeriod=2 Sep 29 19:45:06 crc kubenswrapper[4779]: I0929 19:45:06.996222 4779 generic.go:334] "Generic (PLEG): container finished" podID="eb3116a2-20c2-4963-b567-82e7623ec967" containerID="29a54190155cebf42479d3f9a73f87ec36fb2212b242674b157b275947c003cf" exitCode=0 Sep 29 19:45:06 crc kubenswrapper[4779]: I0929 19:45:06.996396 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerDied","Data":"29a54190155cebf42479d3f9a73f87ec36fb2212b242674b157b275947c003cf"} Sep 29 19:45:06 crc kubenswrapper[4779]: I0929 19:45:06.996562 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gnhlh" event={"ID":"eb3116a2-20c2-4963-b567-82e7623ec967","Type":"ContainerDied","Data":"cdddc17d4a7624a60e5d354a6c8ed87b13ef39b3260493c1dcece79c698e5bcd"} Sep 29 19:45:06 crc kubenswrapper[4779]: I0929 19:45:06.996579 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdddc17d4a7624a60e5d354a6c8ed87b13ef39b3260493c1dcece79c698e5bcd" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.020469 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.119284 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content\") pod \"eb3116a2-20c2-4963-b567-82e7623ec967\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.119500 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm752\" (UniqueName: \"kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752\") pod \"eb3116a2-20c2-4963-b567-82e7623ec967\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.119740 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities\") pod \"eb3116a2-20c2-4963-b567-82e7623ec967\" (UID: \"eb3116a2-20c2-4963-b567-82e7623ec967\") " Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.120631 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities" (OuterVolumeSpecName: "utilities") pod "eb3116a2-20c2-4963-b567-82e7623ec967" (UID: "eb3116a2-20c2-4963-b567-82e7623ec967"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.125978 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752" (OuterVolumeSpecName: "kube-api-access-zm752") pod "eb3116a2-20c2-4963-b567-82e7623ec967" (UID: "eb3116a2-20c2-4963-b567-82e7623ec967"). InnerVolumeSpecName "kube-api-access-zm752". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.186853 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb3116a2-20c2-4963-b567-82e7623ec967" (UID: "eb3116a2-20c2-4963-b567-82e7623ec967"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.223217 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.223261 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm752\" (UniqueName: \"kubernetes.io/projected/eb3116a2-20c2-4963-b567-82e7623ec967-kube-api-access-zm752\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:07 crc kubenswrapper[4779]: I0929 19:45:07.223275 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3116a2-20c2-4963-b567-82e7623ec967-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:45:08 crc kubenswrapper[4779]: I0929 19:45:08.016681 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gnhlh" Sep 29 19:45:08 crc kubenswrapper[4779]: I0929 19:45:08.052378 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gnhlh"] Sep 29 19:45:08 crc kubenswrapper[4779]: I0929 19:45:08.059135 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gnhlh"] Sep 29 19:45:09 crc kubenswrapper[4779]: I0929 19:45:09.783810 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" path="/var/lib/kubelet/pods/eb3116a2-20c2-4963-b567-82e7623ec967/volumes" Sep 29 19:45:16 crc kubenswrapper[4779]: I0929 19:45:16.892345 4779 scope.go:117] "RemoveContainer" containerID="cd53d2289ffddfa4e5198db41eeb503c449f0e6870cff978d2687dd64c7d6c42" Sep 29 19:46:14 crc kubenswrapper[4779]: I0929 19:46:14.758590 4779 generic.go:334] "Generic (PLEG): container finished" podID="7ff61060-93f6-4bd6-a6f9-75195322a8d2" containerID="6e1e8d619b02fff62b076ca07fa7c977da40e3f70d5a18de585946b87e10b666" exitCode=0 Sep 29 19:46:14 crc kubenswrapper[4779]: I0929 19:46:14.758709 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" event={"ID":"7ff61060-93f6-4bd6-a6f9-75195322a8d2","Type":"ContainerDied","Data":"6e1e8d619b02fff62b076ca07fa7c977da40e3f70d5a18de585946b87e10b666"} Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.273930 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.408891 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0\") pod \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.408973 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key\") pod \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.409035 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory\") pod \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.409122 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle\") pod \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.409294 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcmtr\" (UniqueName: \"kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr\") pod \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\" (UID: \"7ff61060-93f6-4bd6-a6f9-75195322a8d2\") " Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.426029 4779 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7ff61060-93f6-4bd6-a6f9-75195322a8d2" (UID: "7ff61060-93f6-4bd6-a6f9-75195322a8d2"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.426238 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr" (OuterVolumeSpecName: "kube-api-access-dcmtr") pod "7ff61060-93f6-4bd6-a6f9-75195322a8d2" (UID: "7ff61060-93f6-4bd6-a6f9-75195322a8d2"). InnerVolumeSpecName "kube-api-access-dcmtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.443826 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7ff61060-93f6-4bd6-a6f9-75195322a8d2" (UID: "7ff61060-93f6-4bd6-a6f9-75195322a8d2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.463327 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "7ff61060-93f6-4bd6-a6f9-75195322a8d2" (UID: "7ff61060-93f6-4bd6-a6f9-75195322a8d2"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.473045 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory" (OuterVolumeSpecName: "inventory") pod "7ff61060-93f6-4bd6-a6f9-75195322a8d2" (UID: "7ff61060-93f6-4bd6-a6f9-75195322a8d2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.512709 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.512746 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.512759 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.512775 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff61060-93f6-4bd6-a6f9-75195322a8d2-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.512790 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcmtr\" (UniqueName: \"kubernetes.io/projected/7ff61060-93f6-4bd6-a6f9-75195322a8d2-kube-api-access-dcmtr\") on node \"crc\" DevicePath \"\"" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.783241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" event={"ID":"7ff61060-93f6-4bd6-a6f9-75195322a8d2","Type":"ContainerDied","Data":"bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873"} Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.783282 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd3e44e27a96f5d2cefd65a8b8c5b0854e4d298afdeb3a1f45215167f4d12873" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.783824 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.883958 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7"] Sep 29 19:46:16 crc kubenswrapper[4779]: E0929 19:46:16.884649 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6258606-2aee-4ffe-89f2-e4ed3850017c" containerName="collect-profiles" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.884689 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6258606-2aee-4ffe-89f2-e4ed3850017c" containerName="collect-profiles" Sep 29 19:46:16 crc kubenswrapper[4779]: E0929 19:46:16.884738 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff61060-93f6-4bd6-a6f9-75195322a8d2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.884752 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff61060-93f6-4bd6-a6f9-75195322a8d2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:46:16 crc kubenswrapper[4779]: E0929 19:46:16.884782 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="registry-server" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.884794 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="registry-server" Sep 29 19:46:16 crc kubenswrapper[4779]: E0929 19:46:16.884814 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="extract-utilities" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.884826 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="extract-utilities" Sep 29 19:46:16 crc kubenswrapper[4779]: E0929 19:46:16.884880 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="extract-content" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.884893 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="extract-content" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.885196 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6258606-2aee-4ffe-89f2-e4ed3850017c" containerName="collect-profiles" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.885224 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff61060-93f6-4bd6-a6f9-75195322a8d2" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.885254 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3116a2-20c2-4963-b567-82e7623ec967" containerName="registry-server" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.886364 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.889496 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.889835 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.889857 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.890190 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.890447 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.890624 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.895363 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 19:46:16 crc kubenswrapper[4779]: I0929 19:46:16.901158 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7"] Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022121 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022583 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022626 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022658 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022687 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022819 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022859 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbh2j\" (UniqueName: \"kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022892 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.022947 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.125214 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.125609 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.125747 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.125863 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.125990 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.126102 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.126356 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.126496 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbh2j\" (UniqueName: \"kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.126606 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.127038 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.130412 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.130453 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.130922 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.131251 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.131538 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.132195 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.132794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.143241 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbh2j\" (UniqueName: \"kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j\") pod \"nova-edpm-deployment-openstack-edpm-ipam-h4cr7\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.212185 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.656964 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7"] Sep 29 19:46:17 crc kubenswrapper[4779]: I0929 19:46:17.791694 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" event={"ID":"06ca3195-0d79-4376-9627-6075a8cdf09c","Type":"ContainerStarted","Data":"0d57db0ae7b0384380aadb29a8ed55bb9343ce13acd8a3b3b7a99006ca4ec4a7"} Sep 29 19:46:18 crc kubenswrapper[4779]: I0929 19:46:18.830393 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" event={"ID":"06ca3195-0d79-4376-9627-6075a8cdf09c","Type":"ContainerStarted","Data":"9e0aa64b0290ef46e65b6cebd68341900d8d8578c47ce37921417fb637c9a207"} Sep 29 19:47:13 crc kubenswrapper[4779]: I0929 19:47:13.785268 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:47:13 crc kubenswrapper[4779]: I0929 19:47:13.786009 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:47:43 crc kubenswrapper[4779]: I0929 19:47:43.785216 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:47:43 crc kubenswrapper[4779]: I0929 19:47:43.786052 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:48:13 crc kubenswrapper[4779]: I0929 19:48:13.785637 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:48:13 crc kubenswrapper[4779]: I0929 19:48:13.786289 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:48:13 crc kubenswrapper[4779]: I0929 19:48:13.786366 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 19:48:13 crc kubenswrapper[4779]: I0929 19:48:13.787352 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 19:48:13 crc kubenswrapper[4779]: I0929 19:48:13.787419 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" gracePeriod=600 Sep 29 19:48:13 crc kubenswrapper[4779]: E0929 19:48:13.923439 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:48:14 crc kubenswrapper[4779]: I0929 19:48:14.103000 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" exitCode=0 Sep 29 19:48:14 crc kubenswrapper[4779]: I0929 19:48:14.103204 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c"} Sep 29 19:48:14 crc kubenswrapper[4779]: I0929 19:48:14.103406 4779 scope.go:117] "RemoveContainer" containerID="9fb2d77a4f4cf683107843d418292174e771334c111af4805d091312c11c1c0e" Sep 29 19:48:14 crc kubenswrapper[4779]: I0929 19:48:14.104385 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:48:14 crc kubenswrapper[4779]: E0929 19:48:14.104824 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:48:14 crc kubenswrapper[4779]: I0929 19:48:14.147520 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" podStartSLOduration=117.711084162 podStartE2EDuration="1m58.147497664s" podCreationTimestamp="2025-09-29 19:46:16 +0000 UTC" firstStartedPulling="2025-09-29 19:46:17.701926706 +0000 UTC m=+2288.586351806" lastFinishedPulling="2025-09-29 19:46:18.138340188 +0000 UTC m=+2289.022765308" observedRunningTime="2025-09-29 19:46:18.854383958 +0000 UTC m=+2289.738809078" watchObservedRunningTime="2025-09-29 19:48:14.147497664 +0000 UTC m=+2405.031922774" Sep 29 19:48:27 crc kubenswrapper[4779]: I0929 19:48:27.769104 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:48:27 crc kubenswrapper[4779]: E0929 19:48:27.770382 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:48:42 crc kubenswrapper[4779]: I0929 19:48:42.767432 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:48:42 crc kubenswrapper[4779]: E0929 19:48:42.768676 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:48:55 crc kubenswrapper[4779]: I0929 19:48:55.767066 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:48:55 crc kubenswrapper[4779]: E0929 19:48:55.768163 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:10 crc kubenswrapper[4779]: I0929 19:49:10.767131 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:49:10 crc kubenswrapper[4779]: E0929 19:49:10.768182 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:21 crc kubenswrapper[4779]: I0929 19:49:21.766149 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:49:21 crc kubenswrapper[4779]: E0929 19:49:21.767113 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:32 crc kubenswrapper[4779]: I0929 19:49:32.766539 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:49:32 crc kubenswrapper[4779]: E0929 19:49:32.767455 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:43 crc kubenswrapper[4779]: I0929 19:49:43.766119 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:49:43 crc kubenswrapper[4779]: E0929 19:49:43.767148 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:53 crc kubenswrapper[4779]: I0929 19:49:53.228200 4779 generic.go:334] "Generic (PLEG): container finished" podID="06ca3195-0d79-4376-9627-6075a8cdf09c" containerID="9e0aa64b0290ef46e65b6cebd68341900d8d8578c47ce37921417fb637c9a207" exitCode=0 Sep 29 19:49:53 crc kubenswrapper[4779]: I0929 19:49:53.228298 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" event={"ID":"06ca3195-0d79-4376-9627-6075a8cdf09c","Type":"ContainerDied","Data":"9e0aa64b0290ef46e65b6cebd68341900d8d8578c47ce37921417fb637c9a207"} Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.728157 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.859976 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860045 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860089 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860112 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860155 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbh2j\" (UniqueName: \"kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860173 4779 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.860212 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.861343 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.861465 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0\") pod \"06ca3195-0d79-4376-9627-6075a8cdf09c\" (UID: \"06ca3195-0d79-4376-9627-6075a8cdf09c\") " Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.866574 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j" (OuterVolumeSpecName: "kube-api-access-gbh2j") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "kube-api-access-gbh2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.871448 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.892497 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.892925 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.894730 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.897684 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.903922 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory" (OuterVolumeSpecName: "inventory") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.905592 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.920036 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "06ca3195-0d79-4376-9627-6075a8cdf09c" (UID: "06ca3195-0d79-4376-9627-6075a8cdf09c"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963475 4779 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963738 4779 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963748 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963756 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963766 4779 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963773 4779 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963782 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbh2j\" (UniqueName: \"kubernetes.io/projected/06ca3195-0d79-4376-9627-6075a8cdf09c-kube-api-access-gbh2j\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963792 4779 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:54 crc kubenswrapper[4779]: I0929 19:49:54.963801 4779 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ca3195-0d79-4376-9627-6075a8cdf09c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.251882 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" event={"ID":"06ca3195-0d79-4376-9627-6075a8cdf09c","Type":"ContainerDied","Data":"0d57db0ae7b0384380aadb29a8ed55bb9343ce13acd8a3b3b7a99006ca4ec4a7"} Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.251939 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d57db0ae7b0384380aadb29a8ed55bb9343ce13acd8a3b3b7a99006ca4ec4a7" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.252009 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-h4cr7" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.419293 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d"] Sep 29 19:49:55 crc kubenswrapper[4779]: E0929 19:49:55.419960 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ca3195-0d79-4376-9627-6075a8cdf09c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.419992 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ca3195-0d79-4376-9627-6075a8cdf09c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.420475 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ca3195-0d79-4376-9627-6075a8cdf09c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.421636 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.429430 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d"] Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.431072 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.431183 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-v8454" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.431216 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.431536 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.432187 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476055 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46pj6\" (UniqueName: \"kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476403 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476453 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476471 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.476504 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.577896 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46pj6\" (UniqueName: \"kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.577950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.578006 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.578044 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.578061 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.582776 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.582864 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.585019 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.585231 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.586160 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.586470 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.586843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.589753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.608897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46pj6\" (UniqueName: \"kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vf74d\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.742537 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:49:55 crc kubenswrapper[4779]: I0929 19:49:55.766697 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:49:55 crc kubenswrapper[4779]: E0929 19:49:55.767173 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:49:56 crc kubenswrapper[4779]: I0929 19:49:56.341819 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d"] Sep 29 19:49:56 crc kubenswrapper[4779]: I0929 19:49:56.355599 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:49:57 crc kubenswrapper[4779]: I0929 19:49:57.282085 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" event={"ID":"a8bc7976-d585-4a94-b925-870996cc4ae3","Type":"ContainerStarted","Data":"a6c5e10c78f485ccacdf1498a3ad251229f2defeef38c7185a1c7f226f200c6b"} Sep 29 19:49:58 crc kubenswrapper[4779]: I0929 19:49:58.297042 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" event={"ID":"a8bc7976-d585-4a94-b925-870996cc4ae3","Type":"ContainerStarted","Data":"17aeedf2a5a682b7a45caf204c4b59157be154fcff8fb0f41e6c6d49caf3115f"} Sep 29 19:49:58 crc kubenswrapper[4779]: I0929 19:49:58.334467 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" podStartSLOduration=2.593123784 podStartE2EDuration="3.334438775s" podCreationTimestamp="2025-09-29 19:49:55 +0000 UTC" firstStartedPulling="2025-09-29 19:49:56.35202613 +0000 UTC m=+2507.236451270" lastFinishedPulling="2025-09-29 19:49:57.093341121 +0000 UTC 
m=+2507.977766261" observedRunningTime="2025-09-29 19:49:58.320972267 +0000 UTC m=+2509.205397407" watchObservedRunningTime="2025-09-29 19:49:58.334438775 +0000 UTC m=+2509.218863905" Sep 29 19:50:08 crc kubenswrapper[4779]: I0929 19:50:08.766434 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:50:08 crc kubenswrapper[4779]: E0929 19:50:08.782497 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:50:21 crc kubenswrapper[4779]: I0929 19:50:21.766725 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:50:21 crc kubenswrapper[4779]: E0929 19:50:21.767644 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:50:35 crc kubenswrapper[4779]: I0929 19:50:35.766857 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:50:35 crc kubenswrapper[4779]: E0929 19:50:35.767680 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:50:46 crc kubenswrapper[4779]: I0929 19:50:46.766690 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:50:46 crc kubenswrapper[4779]: E0929 19:50:46.767327 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:50:57 crc kubenswrapper[4779]: I0929 19:50:57.766458 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:50:57 crc kubenswrapper[4779]: E0929 19:50:57.767301 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 
19:51:09 crc kubenswrapper[4779]: I0929 19:51:09.778752 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:51:09 crc kubenswrapper[4779]: E0929 19:51:09.780075 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:51:17 crc kubenswrapper[4779]: I0929 19:51:17.116250 4779 scope.go:117] "RemoveContainer" containerID="8c91fb2c84dae407794234f9fe2c6ce755dede6695b8a76f64baae0c92c80083" Sep 29 19:51:17 crc kubenswrapper[4779]: I0929 19:51:17.152820 4779 scope.go:117] "RemoveContainer" containerID="335ad8e4d5822c3dc857afb7e1e3c4bd2d6f526db19cb380d2c0c2c3999b4ca4" Sep 29 19:51:17 crc kubenswrapper[4779]: I0929 19:51:17.229504 4779 scope.go:117] "RemoveContainer" containerID="29a54190155cebf42479d3f9a73f87ec36fb2212b242674b157b275947c003cf" Sep 29 19:51:24 crc kubenswrapper[4779]: I0929 19:51:24.767166 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:51:24 crc kubenswrapper[4779]: E0929 19:51:24.768106 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:51:35 crc kubenswrapper[4779]: I0929 19:51:35.766756 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:51:35 crc kubenswrapper[4779]: E0929 19:51:35.768518 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:51:48 crc kubenswrapper[4779]: I0929 19:51:48.766821 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:51:48 crc kubenswrapper[4779]: E0929 19:51:48.767955 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.520858 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.525582 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.539483 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.599928 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.600011 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.600209 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjjkj\" (UniqueName: \"kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.701677 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjjkj\" (UniqueName: \"kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.701761 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.701789 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.702295 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.702965 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.733762 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vjjkj\" (UniqueName: \"kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj\") pod \"redhat-marketplace-lxq4r\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:58 crc kubenswrapper[4779]: I0929 19:51:58.857761 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:51:59 crc kubenswrapper[4779]: I0929 19:51:59.333749 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:51:59 crc kubenswrapper[4779]: I0929 19:51:59.657651 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerID="2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c" exitCode=0 Sep 29 19:51:59 crc kubenswrapper[4779]: I0929 19:51:59.657713 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerDied","Data":"2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c"} Sep 29 19:51:59 crc kubenswrapper[4779]: I0929 19:51:59.657763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerStarted","Data":"61947c1c87322e3b7320a7f3155d11c78ad52ac48228be84fb63c45e948d15ee"} Sep 29 19:52:01 crc kubenswrapper[4779]: I0929 19:52:01.679880 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerID="0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a" exitCode=0 Sep 29 19:52:01 crc kubenswrapper[4779]: I0929 19:52:01.679984 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerDied","Data":"0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a"} Sep 29 19:52:01 crc kubenswrapper[4779]: I0929 19:52:01.766728 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:52:01 crc kubenswrapper[4779]: E0929 19:52:01.767172 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:52:02 crc kubenswrapper[4779]: I0929 19:52:02.699267 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerStarted","Data":"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f"} Sep 29 19:52:02 crc kubenswrapper[4779]: I0929 19:52:02.728002 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lxq4r" podStartSLOduration=2.124085176 podStartE2EDuration="4.727954397s" podCreationTimestamp="2025-09-29 19:51:58 +0000 UTC" firstStartedPulling="2025-09-29 19:51:59.659164785 +0000 UTC m=+2630.543589895" lastFinishedPulling="2025-09-29 
19:52:02.263033986 +0000 UTC m=+2633.147459116" observedRunningTime="2025-09-29 19:52:02.722077206 +0000 UTC m=+2633.606502326" watchObservedRunningTime="2025-09-29 19:52:02.727954397 +0000 UTC m=+2633.612379507" Sep 29 19:52:08 crc kubenswrapper[4779]: I0929 19:52:08.858536 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:08 crc kubenswrapper[4779]: I0929 19:52:08.859404 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:08 crc kubenswrapper[4779]: I0929 19:52:08.918194 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:09 crc kubenswrapper[4779]: I0929 19:52:09.857077 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:09 crc kubenswrapper[4779]: I0929 19:52:09.922051 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:52:11 crc kubenswrapper[4779]: I0929 19:52:11.798644 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lxq4r" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="registry-server" containerID="cri-o://72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f" gracePeriod=2 Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.325990 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.407971 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities\") pod \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.408037 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjjkj\" (UniqueName: \"kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj\") pod \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.408172 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content\") pod \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\" (UID: \"6cded7fe-a53d-40cb-8b2e-556854e3bc1c\") " Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.408795 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities" (OuterVolumeSpecName: "utilities") pod "6cded7fe-a53d-40cb-8b2e-556854e3bc1c" (UID: "6cded7fe-a53d-40cb-8b2e-556854e3bc1c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.408925 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.418229 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj" (OuterVolumeSpecName: "kube-api-access-vjjkj") pod "6cded7fe-a53d-40cb-8b2e-556854e3bc1c" (UID: "6cded7fe-a53d-40cb-8b2e-556854e3bc1c"). InnerVolumeSpecName "kube-api-access-vjjkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.426280 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6cded7fe-a53d-40cb-8b2e-556854e3bc1c" (UID: "6cded7fe-a53d-40cb-8b2e-556854e3bc1c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.511142 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.511193 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjjkj\" (UniqueName: \"kubernetes.io/projected/6cded7fe-a53d-40cb-8b2e-556854e3bc1c-kube-api-access-vjjkj\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.811516 4779 generic.go:334] "Generic (PLEG): container finished" podID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerID="72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f" exitCode=0 Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.811597 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxq4r" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.811602 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerDied","Data":"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f"} Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.811684 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxq4r" event={"ID":"6cded7fe-a53d-40cb-8b2e-556854e3bc1c","Type":"ContainerDied","Data":"61947c1c87322e3b7320a7f3155d11c78ad52ac48228be84fb63c45e948d15ee"} Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.811720 4779 scope.go:117] "RemoveContainer" containerID="72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.843277 4779 scope.go:117] "RemoveContainer" containerID="0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.851822 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.865425 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxq4r"] Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.900772 4779 scope.go:117] "RemoveContainer" containerID="2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.921255 4779 scope.go:117] "RemoveContainer" containerID="72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f" Sep 29 19:52:12 crc kubenswrapper[4779]: E0929 19:52:12.921914 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f\": container with ID starting with 72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f not found: ID does not exist" containerID="72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.921949 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f"} err="failed to get container status \"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f\": rpc error: code = NotFound desc = could not find container \"72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f\": container with ID starting with 72ffa2309d5740eb9181601439886bd75713df7bc9ce45a6d5e9c2c8b5eb376f not found: ID does not exist" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.921974 4779 scope.go:117] "RemoveContainer" containerID="0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a" Sep 29 19:52:12 crc kubenswrapper[4779]: E0929 19:52:12.922331 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a\": container with ID starting with 0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a not found: ID does not exist" containerID="0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.922360 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a"} err="failed to get container status \"0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a\": rpc error: code = NotFound desc = could not find container \"0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a\": container with ID starting with 0dc087e3276551bd6ad639a02ee949b695bbe3644699487b9d4c4e2be712bd0a not found: ID does not exist" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.922377 4779 scope.go:117] "RemoveContainer" containerID="2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c" Sep 29 19:52:12 crc kubenswrapper[4779]: E0929 19:52:12.922630 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c\": container with ID starting with 2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c not found: ID does not exist" containerID="2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c" Sep 29 19:52:12 crc kubenswrapper[4779]: I0929 19:52:12.922656 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c"} err="failed to get container status \"2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c\": rpc error: code = NotFound desc = could not find container \"2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c\": container with ID starting with 2cbed0d36bffee630179cf3501758b1880b6866cfd7a4211b506615ab41f286c not found: ID does not exist" Sep 29 19:52:13 crc kubenswrapper[4779]: I0929 19:52:13.766809 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:52:13 crc kubenswrapper[4779]: E0929 19:52:13.767423 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:52:13 crc kubenswrapper[4779]: I0929 19:52:13.789204 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" path="/var/lib/kubelet/pods/6cded7fe-a53d-40cb-8b2e-556854e3bc1c/volumes" Sep 29 19:52:25 crc kubenswrapper[4779]: I0929 19:52:25.767140 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:52:25 crc kubenswrapper[4779]: E0929 19:52:25.767985 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:52:26 crc kubenswrapper[4779]: I0929 19:52:26.994072 4779 generic.go:334] "Generic (PLEG): container finished" podID="a8bc7976-d585-4a94-b925-870996cc4ae3" 
containerID="17aeedf2a5a682b7a45caf204c4b59157be154fcff8fb0f41e6c6d49caf3115f" exitCode=0 Sep 29 19:52:26 crc kubenswrapper[4779]: I0929 19:52:26.994139 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" event={"ID":"a8bc7976-d585-4a94-b925-870996cc4ae3","Type":"ContainerDied","Data":"17aeedf2a5a682b7a45caf204c4b59157be154fcff8fb0f41e6c6d49caf3115f"} Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.552989 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.667797 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.667886 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.667915 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.668042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.668077 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.668110 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46pj6\" (UniqueName: \"kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.668152 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle\") pod \"a8bc7976-d585-4a94-b925-870996cc4ae3\" (UID: \"a8bc7976-d585-4a94-b925-870996cc4ae3\") " Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.674069 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6" (OuterVolumeSpecName: "kube-api-access-46pj6") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: 
"a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "kube-api-access-46pj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.676788 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.704524 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory" (OuterVolumeSpecName: "inventory") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.705215 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.707669 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.728726 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.730060 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "a8bc7976-d585-4a94-b925-870996cc4ae3" (UID: "a8bc7976-d585-4a94-b925-870996cc4ae3"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770864 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770912 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770934 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770953 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770971 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.770990 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46pj6\" (UniqueName: \"kubernetes.io/projected/a8bc7976-d585-4a94-b925-870996cc4ae3-kube-api-access-46pj6\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:28 crc kubenswrapper[4779]: I0929 19:52:28.771008 4779 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bc7976-d585-4a94-b925-870996cc4ae3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 19:52:29 crc kubenswrapper[4779]: I0929 19:52:29.019783 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" event={"ID":"a8bc7976-d585-4a94-b925-870996cc4ae3","Type":"ContainerDied","Data":"a6c5e10c78f485ccacdf1498a3ad251229f2defeef38c7185a1c7f226f200c6b"} Sep 29 19:52:29 crc kubenswrapper[4779]: I0929 19:52:29.019832 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6c5e10c78f485ccacdf1498a3ad251229f2defeef38c7185a1c7f226f200c6b" Sep 29 19:52:29 crc kubenswrapper[4779]: I0929 19:52:29.019912 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vf74d" Sep 29 19:52:38 crc kubenswrapper[4779]: I0929 19:52:38.767639 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:52:38 crc kubenswrapper[4779]: E0929 19:52:38.768681 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:52:52 crc kubenswrapper[4779]: I0929 19:52:52.766635 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:52:52 crc kubenswrapper[4779]: E0929 19:52:52.767292 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:53:04 crc kubenswrapper[4779]: I0929 19:53:04.766302 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:53:04 crc kubenswrapper[4779]: E0929 19:53:04.768070 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 19:53:19 crc kubenswrapper[4779]: I0929 19:53:19.773265 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c" Sep 29 19:53:20 crc kubenswrapper[4779]: I0929 19:53:20.600949 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98"} Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.679649 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:53:25 crc kubenswrapper[4779]: E0929 19:53:25.680931 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="extract-content" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.680976 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="extract-content" Sep 29 19:53:25 crc kubenswrapper[4779]: E0929 19:53:25.681000 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8bc7976-d585-4a94-b925-870996cc4ae3" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.681011 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a8bc7976-d585-4a94-b925-870996cc4ae3" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:53:25 crc kubenswrapper[4779]: E0929 19:53:25.681028 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="extract-utilities" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.681061 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="extract-utilities" Sep 29 19:53:25 crc kubenswrapper[4779]: E0929 19:53:25.681096 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="registry-server" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.681104 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="registry-server" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.681449 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cded7fe-a53d-40cb-8b2e-556854e3bc1c" containerName="registry-server" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.681474 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8bc7976-d585-4a94-b925-870996cc4ae3" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.682838 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.685791 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.685886 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.686542 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-27mvk" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.686580 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.699689 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.764685 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nljkg\" (UniqueName: \"kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.764810 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.764856 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: 
\"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.764901 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.765009 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.765167 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.765274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.765347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.765411 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868384 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " 
pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868448 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868544 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nljkg\" (UniqueName: \"kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868596 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868632 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.868714 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.869411 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.869585 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.870724 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: 
I0929 19:53:25.871350 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.871452 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.877436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.877596 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.886147 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.905229 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nljkg\" (UniqueName: \"kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:25 crc kubenswrapper[4779]: I0929 19:53:25.928232 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " pod="openstack/tempest-tests-tempest" Sep 29 19:53:26 crc kubenswrapper[4779]: I0929 19:53:26.007482 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 19:53:26 crc kubenswrapper[4779]: I0929 19:53:26.309068 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 19:53:26 crc kubenswrapper[4779]: W0929 19:53:26.315677 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bf09edd_ad1f_4883_ade0_8082b2055f60.slice/crio-7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd WatchSource:0}: Error finding container 7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd: Status 404 returned error can't find the container with id 7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd Sep 29 19:53:26 crc kubenswrapper[4779]: I0929 19:53:26.677579 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8bf09edd-ad1f-4883-ade0-8082b2055f60","Type":"ContainerStarted","Data":"7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd"} Sep 29 19:53:57 crc kubenswrapper[4779]: E0929 19:53:57.380036 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 29 19:53:57 crc kubenswrapper[4779]: E0929 19:53:57.380950 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nljkg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPre
sent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(8bf09edd-ad1f-4883-ade0-8082b2055f60): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 19:53:57 crc kubenswrapper[4779]: E0929 19:53:57.382995 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="8bf09edd-ad1f-4883-ade0-8082b2055f60" Sep 29 19:53:57 crc kubenswrapper[4779]: E0929 19:53:57.997631 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="8bf09edd-ad1f-4883-ade0-8082b2055f60" Sep 29 19:54:13 crc kubenswrapper[4779]: I0929 19:54:13.267717 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 19:54:15 crc kubenswrapper[4779]: I0929 19:54:15.192056 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8bf09edd-ad1f-4883-ade0-8082b2055f60","Type":"ContainerStarted","Data":"bf9dd8426e4e5b7efb6e15d9e65d28271c2cfeccddbf35f0cbb187d353b9ecd0"} Sep 29 19:54:15 crc kubenswrapper[4779]: I0929 19:54:15.217891 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.271274528 podStartE2EDuration="51.217870223s" podCreationTimestamp="2025-09-29 19:53:24 +0000 UTC" firstStartedPulling="2025-09-29 19:53:26.318150393 +0000 UTC m=+2717.202575503" lastFinishedPulling="2025-09-29 19:54:13.264746058 +0000 UTC m=+2764.149171198" observedRunningTime="2025-09-29 19:54:15.213719159 +0000 UTC m=+2766.098144289" watchObservedRunningTime="2025-09-29 19:54:15.217870223 +0000 UTC m=+2766.102295313" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.451256 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.458530 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.472156 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.598858 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.598928 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.599240 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmtgd\" (UniqueName: \"kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.701917 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.701983 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.702091 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmtgd\" (UniqueName: \"kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.702959 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.702997 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.727001 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xmtgd\" (UniqueName: \"kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd\") pod \"certified-operators-tldmz\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:52 crc kubenswrapper[4779]: I0929 19:54:52.792773 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:54:53 crc kubenswrapper[4779]: I0929 19:54:53.315376 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:54:53 crc kubenswrapper[4779]: W0929 19:54:53.327155 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod200674e6_be9c_4d6d_be45_8648672e4fb6.slice/crio-e462fc600d263fa85dd896f7c22f0b59adad68daa0653190b531935f6d9b2463 WatchSource:0}: Error finding container e462fc600d263fa85dd896f7c22f0b59adad68daa0653190b531935f6d9b2463: Status 404 returned error can't find the container with id e462fc600d263fa85dd896f7c22f0b59adad68daa0653190b531935f6d9b2463 Sep 29 19:54:53 crc kubenswrapper[4779]: I0929 19:54:53.635681 4779 generic.go:334] "Generic (PLEG): container finished" podID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerID="b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad" exitCode=0 Sep 29 19:54:53 crc kubenswrapper[4779]: I0929 19:54:53.635751 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerDied","Data":"b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad"} Sep 29 19:54:53 crc kubenswrapper[4779]: I0929 19:54:53.637767 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerStarted","Data":"e462fc600d263fa85dd896f7c22f0b59adad68daa0653190b531935f6d9b2463"} Sep 29 19:54:54 crc kubenswrapper[4779]: I0929 19:54:54.651708 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerStarted","Data":"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888"} Sep 29 19:54:55 crc kubenswrapper[4779]: I0929 19:54:55.662769 4779 generic.go:334] "Generic (PLEG): container finished" podID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerID="5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888" exitCode=0 Sep 29 19:54:55 crc kubenswrapper[4779]: I0929 19:54:55.662837 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerDied","Data":"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888"} Sep 29 19:54:56 crc kubenswrapper[4779]: I0929 19:54:56.674123 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerStarted","Data":"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d"} Sep 29 19:55:02 crc kubenswrapper[4779]: I0929 19:55:02.793937 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:02 crc 
kubenswrapper[4779]: I0929 19:55:02.794687 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:02 crc kubenswrapper[4779]: I0929 19:55:02.866394 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:02 crc kubenswrapper[4779]: I0929 19:55:02.893351 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tldmz" podStartSLOduration=8.411471739 podStartE2EDuration="10.893299156s" podCreationTimestamp="2025-09-29 19:54:52 +0000 UTC" firstStartedPulling="2025-09-29 19:54:53.637512961 +0000 UTC m=+2804.521938061" lastFinishedPulling="2025-09-29 19:54:56.119340348 +0000 UTC m=+2807.003765478" observedRunningTime="2025-09-29 19:54:56.715128164 +0000 UTC m=+2807.599553264" watchObservedRunningTime="2025-09-29 19:55:02.893299156 +0000 UTC m=+2813.777724296" Sep 29 19:55:03 crc kubenswrapper[4779]: I0929 19:55:03.839557 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:03 crc kubenswrapper[4779]: I0929 19:55:03.910546 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:55:05 crc kubenswrapper[4779]: I0929 19:55:05.778569 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tldmz" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="registry-server" containerID="cri-o://6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d" gracePeriod=2 Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.224637 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.310577 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities\") pod \"200674e6-be9c-4d6d-be45-8648672e4fb6\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.310652 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content\") pod \"200674e6-be9c-4d6d-be45-8648672e4fb6\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.310751 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmtgd\" (UniqueName: \"kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd\") pod \"200674e6-be9c-4d6d-be45-8648672e4fb6\" (UID: \"200674e6-be9c-4d6d-be45-8648672e4fb6\") " Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.312748 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities" (OuterVolumeSpecName: "utilities") pod "200674e6-be9c-4d6d-be45-8648672e4fb6" (UID: "200674e6-be9c-4d6d-be45-8648672e4fb6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.316388 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd" (OuterVolumeSpecName: "kube-api-access-xmtgd") pod "200674e6-be9c-4d6d-be45-8648672e4fb6" (UID: "200674e6-be9c-4d6d-be45-8648672e4fb6"). InnerVolumeSpecName "kube-api-access-xmtgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.357712 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "200674e6-be9c-4d6d-be45-8648672e4fb6" (UID: "200674e6-be9c-4d6d-be45-8648672e4fb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.412544 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.412573 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmtgd\" (UniqueName: \"kubernetes.io/projected/200674e6-be9c-4d6d-be45-8648672e4fb6-kube-api-access-xmtgd\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.412586 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/200674e6-be9c-4d6d-be45-8648672e4fb6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.792469 4779 generic.go:334] "Generic (PLEG): container finished" podID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerID="6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d" exitCode=0 Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.792532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerDied","Data":"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d"} Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.792535 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tldmz" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.792587 4779 scope.go:117] "RemoveContainer" containerID="6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.792570 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tldmz" event={"ID":"200674e6-be9c-4d6d-be45-8648672e4fb6","Type":"ContainerDied","Data":"e462fc600d263fa85dd896f7c22f0b59adad68daa0653190b531935f6d9b2463"} Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.823578 4779 scope.go:117] "RemoveContainer" containerID="5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.842345 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.847546 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tldmz"] Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.868735 4779 scope.go:117] "RemoveContainer" containerID="b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.928601 4779 scope.go:117] "RemoveContainer" containerID="6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d" Sep 29 19:55:06 crc kubenswrapper[4779]: E0929 19:55:06.929255 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d\": container with ID starting with 6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d not found: ID does not exist" containerID="6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.929292 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d"} err="failed to get container status \"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d\": rpc error: code = NotFound desc = could not find container \"6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d\": container with ID starting with 6e9ec0d749846d51fbb20bca1b97851f88213372502d6556554a0c61b18dc11d not found: ID does not exist" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.929347 4779 scope.go:117] "RemoveContainer" containerID="5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888" Sep 29 19:55:06 crc kubenswrapper[4779]: E0929 19:55:06.929967 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888\": container with ID starting with 5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888 not found: ID does not exist" containerID="5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.930068 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888"} err="failed to get container status \"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888\": rpc error: code = NotFound desc = could not find 
container \"5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888\": container with ID starting with 5001a3a0b2a2cf666f556af6fd0b53b8e6ed19cb03e624485a0ab7a380aa1888 not found: ID does not exist" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.930155 4779 scope.go:117] "RemoveContainer" containerID="b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad" Sep 29 19:55:06 crc kubenswrapper[4779]: E0929 19:55:06.930992 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad\": container with ID starting with b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad not found: ID does not exist" containerID="b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad" Sep 29 19:55:06 crc kubenswrapper[4779]: I0929 19:55:06.931051 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad"} err="failed to get container status \"b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad\": rpc error: code = NotFound desc = could not find container \"b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad\": container with ID starting with b6c9b55888039dc1ef6613e11f0b29756d899f0ccd1e07de75a04655d8c9a4ad not found: ID does not exist" Sep 29 19:55:07 crc kubenswrapper[4779]: I0929 19:55:07.785998 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" path="/var/lib/kubelet/pods/200674e6-be9c-4d6d-be45-8648672e4fb6/volumes" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.912889 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:10 crc kubenswrapper[4779]: E0929 19:55:10.913723 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="extract-utilities" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.913734 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="extract-utilities" Sep 29 19:55:10 crc kubenswrapper[4779]: E0929 19:55:10.913750 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="registry-server" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.913756 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="registry-server" Sep 29 19:55:10 crc kubenswrapper[4779]: E0929 19:55:10.913781 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="extract-content" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.913787 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="extract-content" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.913976 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="200674e6-be9c-4d6d-be45-8648672e4fb6" containerName="registry-server" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.915346 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:10 crc kubenswrapper[4779]: I0929 19:55:10.926912 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.110274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.110435 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld26v\" (UniqueName: \"kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.110484 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.212037 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.212136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld26v\" (UniqueName: \"kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.212160 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.212890 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.212955 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.234809 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ld26v\" (UniqueName: \"kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v\") pod \"redhat-operators-66x4q\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.235376 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.729721 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:11 crc kubenswrapper[4779]: I0929 19:55:11.856054 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerStarted","Data":"31160764fb1d16a7f5033dafba374e7f9af534aadcfcccf005c08923fe3419d4"} Sep 29 19:55:12 crc kubenswrapper[4779]: I0929 19:55:12.870357 4779 generic.go:334] "Generic (PLEG): container finished" podID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerID="5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be" exitCode=0 Sep 29 19:55:12 crc kubenswrapper[4779]: I0929 19:55:12.870454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerDied","Data":"5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be"} Sep 29 19:55:12 crc kubenswrapper[4779]: I0929 19:55:12.874539 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 19:55:14 crc kubenswrapper[4779]: I0929 19:55:14.914636 4779 generic.go:334] "Generic (PLEG): container finished" podID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerID="f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f" exitCode=0 Sep 29 19:55:14 crc kubenswrapper[4779]: I0929 19:55:14.914740 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerDied","Data":"f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f"} Sep 29 19:55:15 crc kubenswrapper[4779]: I0929 19:55:15.927832 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerStarted","Data":"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12"} Sep 29 19:55:15 crc kubenswrapper[4779]: I0929 19:55:15.956203 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-66x4q" podStartSLOduration=3.32519948 podStartE2EDuration="5.956180592s" podCreationTimestamp="2025-09-29 19:55:10 +0000 UTC" firstStartedPulling="2025-09-29 19:55:12.874262781 +0000 UTC m=+2823.758687881" lastFinishedPulling="2025-09-29 19:55:15.505243883 +0000 UTC m=+2826.389668993" observedRunningTime="2025-09-29 19:55:15.944472852 +0000 UTC m=+2826.828897962" watchObservedRunningTime="2025-09-29 19:55:15.956180592 +0000 UTC m=+2826.840605702" Sep 29 19:55:21 crc kubenswrapper[4779]: I0929 19:55:21.236223 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:21 crc kubenswrapper[4779]: I0929 19:55:21.237534 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:22 crc kubenswrapper[4779]: I0929 19:55:22.294569 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-66x4q" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="registry-server" probeResult="failure" output=< Sep 29 19:55:22 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 19:55:22 crc kubenswrapper[4779]: > Sep 29 19:55:31 crc kubenswrapper[4779]: I0929 19:55:31.293605 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:31 crc kubenswrapper[4779]: I0929 19:55:31.359081 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:31 crc kubenswrapper[4779]: I0929 19:55:31.536421 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.105714 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-66x4q" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="registry-server" containerID="cri-o://9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12" gracePeriod=2 Sep 29 19:55:33 crc kubenswrapper[4779]: E0929 19:55:33.257451 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30545e09_fcc1_4156_a362_1f1d692bbbe7.slice/crio-9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12.scope\": RecentStats: unable to find data in memory cache]" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.651820 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.805591 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld26v\" (UniqueName: \"kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v\") pod \"30545e09-fcc1-4156-a362-1f1d692bbbe7\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.805797 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content\") pod \"30545e09-fcc1-4156-a362-1f1d692bbbe7\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.805869 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities\") pod \"30545e09-fcc1-4156-a362-1f1d692bbbe7\" (UID: \"30545e09-fcc1-4156-a362-1f1d692bbbe7\") " Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.806850 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities" (OuterVolumeSpecName: "utilities") pod "30545e09-fcc1-4156-a362-1f1d692bbbe7" (UID: "30545e09-fcc1-4156-a362-1f1d692bbbe7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.814889 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v" (OuterVolumeSpecName: "kube-api-access-ld26v") pod "30545e09-fcc1-4156-a362-1f1d692bbbe7" (UID: "30545e09-fcc1-4156-a362-1f1d692bbbe7"). InnerVolumeSpecName "kube-api-access-ld26v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.908469 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.908802 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld26v\" (UniqueName: \"kubernetes.io/projected/30545e09-fcc1-4156-a362-1f1d692bbbe7-kube-api-access-ld26v\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:33 crc kubenswrapper[4779]: I0929 19:55:33.925197 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30545e09-fcc1-4156-a362-1f1d692bbbe7" (UID: "30545e09-fcc1-4156-a362-1f1d692bbbe7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.011695 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30545e09-fcc1-4156-a362-1f1d692bbbe7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.120833 4779 generic.go:334] "Generic (PLEG): container finished" podID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerID="9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12" exitCode=0 Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.120919 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerDied","Data":"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12"} Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.120949 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66x4q" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.120994 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66x4q" event={"ID":"30545e09-fcc1-4156-a362-1f1d692bbbe7","Type":"ContainerDied","Data":"31160764fb1d16a7f5033dafba374e7f9af534aadcfcccf005c08923fe3419d4"} Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.121045 4779 scope.go:117] "RemoveContainer" containerID="9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.160397 4779 scope.go:117] "RemoveContainer" containerID="f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.196706 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.204228 4779 scope.go:117] "RemoveContainer" containerID="5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.221296 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-66x4q"] Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.263308 4779 scope.go:117] "RemoveContainer" containerID="9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12" Sep 29 19:55:34 crc kubenswrapper[4779]: E0929 19:55:34.263931 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12\": container with ID starting with 9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12 not found: ID does not exist" containerID="9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.264002 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12"} err="failed to get container status \"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12\": rpc error: code = NotFound desc = could not find container \"9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12\": container with ID starting with 9a303eee9609f80308d3b017d04e0a4557e59f4c065a01f2ab5eefed09281f12 not found: ID does not exist" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.264045 4779 scope.go:117] "RemoveContainer" containerID="f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f" Sep 29 19:55:34 crc kubenswrapper[4779]: E0929 19:55:34.264608 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f\": container with ID starting with f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f not found: ID does not exist" containerID="f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.264676 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f"} err="failed to get container status \"f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f\": rpc error: code = NotFound desc = could not find container 
\"f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f\": container with ID starting with f9e632f4637c7475b4a96c86843c6680c2d1a7a5a60c544cc3e2f954235dd70f not found: ID does not exist" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.264704 4779 scope.go:117] "RemoveContainer" containerID="5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be" Sep 29 19:55:34 crc kubenswrapper[4779]: E0929 19:55:34.265016 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be\": container with ID starting with 5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be not found: ID does not exist" containerID="5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be" Sep 29 19:55:34 crc kubenswrapper[4779]: I0929 19:55:34.265052 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be"} err="failed to get container status \"5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be\": rpc error: code = NotFound desc = could not find container \"5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be\": container with ID starting with 5bc770d392e81c55f342e1af37c0dfe115c103b4eca2951a9585a4d7919bb7be not found: ID does not exist" Sep 29 19:55:35 crc kubenswrapper[4779]: I0929 19:55:35.778208 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" path="/var/lib/kubelet/pods/30545e09-fcc1-4156-a362-1f1d692bbbe7/volumes" Sep 29 19:55:43 crc kubenswrapper[4779]: I0929 19:55:43.785571 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:55:43 crc kubenswrapper[4779]: I0929 19:55:43.786117 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:56:13 crc kubenswrapper[4779]: I0929 19:56:13.784842 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:56:13 crc kubenswrapper[4779]: I0929 19:56:13.785488 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 19:56:43 crc kubenswrapper[4779]: I0929 19:56:43.785556 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 19:56:43 crc 
kubenswrapper[4779]: I0929 19:56:43.786122 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:56:43 crc kubenswrapper[4779]: I0929 19:56:43.786617 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr"
Sep 29 19:56:43 crc kubenswrapper[4779]: I0929 19:56:43.787916 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 19:56:43 crc kubenswrapper[4779]: I0929 19:56:43.788034 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98" gracePeriod=600
Sep 29 19:56:44 crc kubenswrapper[4779]: I0929 19:56:44.908729 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98" exitCode=0
Sep 29 19:56:44 crc kubenswrapper[4779]: I0929 19:56:44.908807 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98"}
Sep 29 19:56:44 crc kubenswrapper[4779]: I0929 19:56:44.909433 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"}
Sep 29 19:56:44 crc kubenswrapper[4779]: I0929 19:56:44.909473 4779 scope.go:117] "RemoveContainer" containerID="01e960f6c47393436db220d868c528f2bb6c5c1ddb0bfa806d83addfe11f5c5c"
Sep 29 19:59:13 crc kubenswrapper[4779]: I0929 19:59:13.785403 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 19:59:13 crc kubenswrapper[4779]: I0929 19:59:13.786220 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 19:59:43 crc kubenswrapper[4779]: I0929 19:59:43.785447 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
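The run above is one full liveness-probe restart cycle for machine-config-daemon: prober.go records the third consecutive failure against http://127.0.0.1:8798/health, SyncLoop (probe) flags the container unhealthy, kuberuntime_container.go kills it with the pod's 600-second grace period, and PLEG then reports ContainerDied (exitCode=0, a clean exit on SIGTERM) followed by ContainerStarted for the replacement. A minimal sketch of that probe loop, assuming a 30s period and failureThreshold=3 (the 30-second cadence and third-failure kill above are consistent with both, but neither value is recorded in this log):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        const (
            url       = "http://127.0.0.1:8798/health" // endpoint from the probe output above
            period    = 30 * time.Second               // assumed periodSeconds
            threshold = 3                              // assumed failureThreshold
        )
        failures := 0
        for {
            resp, err := http.Get(url)
            // short-circuit keeps resp from being dereferenced when err != nil
            healthy := err == nil && resp.StatusCode >= 200 && resp.StatusCode < 400
            if err == nil {
                resp.Body.Close()
            }
            if healthy {
                failures = 0
            } else if failures++; failures >= threshold {
                fmt.Println("unhealthy: kill container with its grace period, then restart")
                failures = 0
            }
            time.Sleep(period)
        }
    }

The same cycle repeats at 20:00:13 below, after which the restarts begin tripping CrashLoopBackOff.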
Sep 29 19:59:43 crc kubenswrapper[4779]: I0929 19:59:43.786978 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.242998 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"]
Sep 29 20:00:00 crc kubenswrapper[4779]: E0929 20:00:00.243898 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="extract-utilities"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.243915 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="extract-utilities"
Sep 29 20:00:00 crc kubenswrapper[4779]: E0929 20:00:00.243939 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="extract-content"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.243949 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="extract-content"
Sep 29 20:00:00 crc kubenswrapper[4779]: E0929 20:00:00.243971 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="registry-server"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.243979 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="registry-server"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.244237 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="30545e09-fcc1-4156-a362-1f1d692bbbe7" containerName="registry-server"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.245274 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
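The suffix in collect-profiles-29319600-n8phb is the job's scheduled time: the CronJob controller names each Job <cronjob>-<scheduled time in minutes since the Unix epoch>, and the pod inherits the Job name plus a random suffix. A quick check that 29319600 decodes to exactly the 20:00:00 timestamp on these entries:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const suffix = 29319600 // from collect-profiles-29319600-n8phb
        fmt.Println(time.Unix(suffix*60, 0).UTC())
        // 2025-09-29 20:00:00 +0000 UTC
    }

keystone-cron-29319601-sr9fp later in the log decodes the same way to 20:01:00 UTC, matching its SyncLoop ADD at Sep 29 20:01:00.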
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.248125 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.263749 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.266072 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"]
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.356390 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.356545 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv75g\" (UniqueName: \"kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.356668 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.458877 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.458980 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv75g\" (UniqueName: \"kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.459033 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.460654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.470075 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.483398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv75g\" (UniqueName: \"kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g\") pod \"collect-profiles-29319600-n8phb\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:00 crc kubenswrapper[4779]: I0929 20:00:00.579721 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
Sep 29 20:00:01 crc kubenswrapper[4779]: I0929 20:00:01.063580 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"]
Sep 29 20:00:01 crc kubenswrapper[4779]: I0929 20:00:01.218376 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb" event={"ID":"ed6dfb25-6e70-408c-8c31-4635ab3ba00c","Type":"ContainerStarted","Data":"ed58f0425ec9d976884845b27f5b1dfdc1e1b3a4bfd8a8876838b7cce09bdb2b"}
Sep 29 20:00:02 crc kubenswrapper[4779]: I0929 20:00:02.234087 4779 generic.go:334] "Generic (PLEG): container finished" podID="ed6dfb25-6e70-408c-8c31-4635ab3ba00c" containerID="0170d57f196f18dbd409a31ca0fa960bf6bec7f53955e10f739da5848ecccadf" exitCode=0
Sep 29 20:00:02 crc kubenswrapper[4779]: I0929 20:00:02.234185 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb" event={"ID":"ed6dfb25-6e70-408c-8c31-4635ab3ba00c","Type":"ContainerDied","Data":"0170d57f196f18dbd409a31ca0fa960bf6bec7f53955e10f739da5848ecccadf"}
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.688624 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb"
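The reconciler_common.go entries above and below are two passes of the same loop: the kubelet's volume manager continually diffs the desired state of the world (volumes required by pods assigned to the node) against the actual state (what is attached and mounted), mounting what is missing when a pod arrives and unmounting what is left over once the pod is deleted. A toy version of that diff-and-act cycle, an illustration of the pattern rather than kubelet code:

    package main

    import "fmt"

    // reconcile mounts volumes that are desired but not mounted, and
    // unmounts volumes that are mounted but no longer desired.
    func reconcile(desired map[string]bool, mounted map[string]bool) {
        for v := range desired {
            if !mounted[v] {
                fmt.Printf("MountVolume started for %q\n", v)
                mounted[v] = true // MountVolume.SetUp succeeded
            }
        }
        for v := range mounted {
            if !desired[v] {
                fmt.Printf("UnmountVolume started for %q\n", v)
                delete(mounted, v) // UnmountVolume.TearDown succeeded; volume detached
            }
        }
    }

    func main() {
        mounted := map[string]bool{}
        podVolumes := map[string]bool{"config-volume": true, "secret-volume": true, "kube-api-access-xv75g": true}
        reconcile(podVolumes, mounted)        // pod added: three mounts, as at 20:00:00
        reconcile(map[string]bool{}, mounted) // pod deleted: three unmounts, as at 20:00:03
    }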
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.741521 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xv75g\" (UniqueName: \"kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g\") pod \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") "
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.741763 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume\") pod \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") "
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.741869 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume\") pod \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\" (UID: \"ed6dfb25-6e70-408c-8c31-4635ab3ba00c\") "
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.744912 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume" (OuterVolumeSpecName: "config-volume") pod "ed6dfb25-6e70-408c-8c31-4635ab3ba00c" (UID: "ed6dfb25-6e70-408c-8c31-4635ab3ba00c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.750095 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g" (OuterVolumeSpecName: "kube-api-access-xv75g") pod "ed6dfb25-6e70-408c-8c31-4635ab3ba00c" (UID: "ed6dfb25-6e70-408c-8c31-4635ab3ba00c"). InnerVolumeSpecName "kube-api-access-xv75g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.753517 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ed6dfb25-6e70-408c-8c31-4635ab3ba00c" (UID: "ed6dfb25-6e70-408c-8c31-4635ab3ba00c"). InnerVolumeSpecName "secret-volume".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.845103 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xv75g\" (UniqueName: \"kubernetes.io/projected/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-kube-api-access-xv75g\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.845745 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:03 crc kubenswrapper[4779]: I0929 20:00:03.845763 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed6dfb25-6e70-408c-8c31-4635ab3ba00c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:04 crc kubenswrapper[4779]: I0929 20:00:04.260842 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb" event={"ID":"ed6dfb25-6e70-408c-8c31-4635ab3ba00c","Type":"ContainerDied","Data":"ed58f0425ec9d976884845b27f5b1dfdc1e1b3a4bfd8a8876838b7cce09bdb2b"} Sep 29 20:00:04 crc kubenswrapper[4779]: I0929 20:00:04.260920 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed58f0425ec9d976884845b27f5b1dfdc1e1b3a4bfd8a8876838b7cce09bdb2b" Sep 29 20:00:04 crc kubenswrapper[4779]: I0929 20:00:04.261007 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319600-n8phb" Sep 29 20:00:04 crc kubenswrapper[4779]: I0929 20:00:04.797053 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn"] Sep 29 20:00:04 crc kubenswrapper[4779]: I0929 20:00:04.805938 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319555-8fndn"] Sep 29 20:00:05 crc kubenswrapper[4779]: I0929 20:00:05.812265 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18a164cd-10b2-4913-87b3-8ee84cc1a8b4" path="/var/lib/kubelet/pods/18a164cd-10b2-4913-87b3-8ee84cc1a8b4/volumes" Sep 29 20:00:13 crc kubenswrapper[4779]: I0929 20:00:13.785164 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:00:13 crc kubenswrapper[4779]: I0929 20:00:13.787566 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:00:13 crc kubenswrapper[4779]: I0929 20:00:13.787662 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 20:00:13 crc kubenswrapper[4779]: I0929 20:00:13.788775 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"} 
pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:00:13 crc kubenswrapper[4779]: I0929 20:00:13.788885 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" gracePeriod=600 Sep 29 20:00:13 crc kubenswrapper[4779]: E0929 20:00:13.924233 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:00:14 crc kubenswrapper[4779]: I0929 20:00:14.384153 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" exitCode=0 Sep 29 20:00:14 crc kubenswrapper[4779]: I0929 20:00:14.384222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"} Sep 29 20:00:14 crc kubenswrapper[4779]: I0929 20:00:14.384280 4779 scope.go:117] "RemoveContainer" containerID="b21c66ee30542a53761986d79c2d6a62c4d6497440c6bb49825d675e2825fb98" Sep 29 20:00:14 crc kubenswrapper[4779]: I0929 20:00:14.385065 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:00:14 crc kubenswrapper[4779]: E0929 20:00:14.385374 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.499238 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:17 crc kubenswrapper[4779]: E0929 20:00:17.500343 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6dfb25-6e70-408c-8c31-4635ab3ba00c" containerName="collect-profiles" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.500366 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6dfb25-6e70-408c-8c31-4635ab3ba00c" containerName="collect-profiles" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.500763 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6dfb25-6e70-408c-8c31-4635ab3ba00c" containerName="collect-profiles" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.503263 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.512113 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.546407 4779 scope.go:117] "RemoveContainer" containerID="34d910cb77404a1b7e311671dd5eccb66f82340b0742d59394ed6284949e0cdb" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.570540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.570919 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh4cv\" (UniqueName: \"kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.570943 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.673555 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh4cv\" (UniqueName: \"kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.673646 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.674071 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.674558 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.674622 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities\") pod \"community-operators-x2fgq\" (UID: 
\"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.702650 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh4cv\" (UniqueName: \"kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv\") pod \"community-operators-x2fgq\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:17 crc kubenswrapper[4779]: I0929 20:00:17.836846 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:18 crc kubenswrapper[4779]: I0929 20:00:18.343059 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:18 crc kubenswrapper[4779]: I0929 20:00:18.433053 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerStarted","Data":"3ef5b51b67f85efcde521f77a081bbef21cacbee6f363db2d976101e9c72e5ee"} Sep 29 20:00:19 crc kubenswrapper[4779]: I0929 20:00:19.449300 4779 generic.go:334] "Generic (PLEG): container finished" podID="11639700-67d7-4d3d-abff-7186c1a98272" containerID="6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d" exitCode=0 Sep 29 20:00:19 crc kubenswrapper[4779]: I0929 20:00:19.449361 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerDied","Data":"6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d"} Sep 29 20:00:19 crc kubenswrapper[4779]: I0929 20:00:19.452551 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:00:21 crc kubenswrapper[4779]: I0929 20:00:21.473754 4779 generic.go:334] "Generic (PLEG): container finished" podID="11639700-67d7-4d3d-abff-7186c1a98272" containerID="3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84" exitCode=0 Sep 29 20:00:21 crc kubenswrapper[4779]: I0929 20:00:21.473842 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerDied","Data":"3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84"} Sep 29 20:00:22 crc kubenswrapper[4779]: I0929 20:00:22.485077 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerStarted","Data":"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050"} Sep 29 20:00:22 crc kubenswrapper[4779]: I0929 20:00:22.507527 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x2fgq" podStartSLOduration=2.8559552999999998 podStartE2EDuration="5.507502473s" podCreationTimestamp="2025-09-29 20:00:17 +0000 UTC" firstStartedPulling="2025-09-29 20:00:19.452090709 +0000 UTC m=+3130.336515849" lastFinishedPulling="2025-09-29 20:00:22.103637912 +0000 UTC m=+3132.988063022" observedRunningTime="2025-09-29 20:00:22.504062179 +0000 UTC m=+3133.388487289" watchObservedRunningTime="2025-09-29 20:00:22.507502473 +0000 UTC m=+3133.391927593" Sep 29 20:00:27 crc kubenswrapper[4779]: I0929 20:00:27.837284 4779 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:27 crc kubenswrapper[4779]: I0929 20:00:27.837934 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:27 crc kubenswrapper[4779]: I0929 20:00:27.883039 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:28 crc kubenswrapper[4779]: I0929 20:00:28.653954 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:29 crc kubenswrapper[4779]: I0929 20:00:29.283399 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:29 crc kubenswrapper[4779]: I0929 20:00:29.771991 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:00:29 crc kubenswrapper[4779]: E0929 20:00:29.772263 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:00:30 crc kubenswrapper[4779]: I0929 20:00:30.583153 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x2fgq" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="registry-server" containerID="cri-o://7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050" gracePeriod=2 Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.226717 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.288709 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content\") pod \"11639700-67d7-4d3d-abff-7186c1a98272\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.288914 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities\") pod \"11639700-67d7-4d3d-abff-7186c1a98272\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.289007 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh4cv\" (UniqueName: \"kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv\") pod \"11639700-67d7-4d3d-abff-7186c1a98272\" (UID: \"11639700-67d7-4d3d-abff-7186c1a98272\") " Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.289655 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities" (OuterVolumeSpecName: "utilities") pod "11639700-67d7-4d3d-abff-7186c1a98272" (UID: "11639700-67d7-4d3d-abff-7186c1a98272"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.289903 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.294505 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv" (OuterVolumeSpecName: "kube-api-access-gh4cv") pod "11639700-67d7-4d3d-abff-7186c1a98272" (UID: "11639700-67d7-4d3d-abff-7186c1a98272"). InnerVolumeSpecName "kube-api-access-gh4cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.344236 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11639700-67d7-4d3d-abff-7186c1a98272" (UID: "11639700-67d7-4d3d-abff-7186c1a98272"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.391675 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh4cv\" (UniqueName: \"kubernetes.io/projected/11639700-67d7-4d3d-abff-7186c1a98272-kube-api-access-gh4cv\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.391707 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11639700-67d7-4d3d-abff-7186c1a98272-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.593454 4779 generic.go:334] "Generic (PLEG): container finished" podID="11639700-67d7-4d3d-abff-7186c1a98272" containerID="7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050" exitCode=0 Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.593512 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x2fgq" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.593525 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerDied","Data":"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050"} Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.593926 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x2fgq" event={"ID":"11639700-67d7-4d3d-abff-7186c1a98272","Type":"ContainerDied","Data":"3ef5b51b67f85efcde521f77a081bbef21cacbee6f363db2d976101e9c72e5ee"} Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.593962 4779 scope.go:117] "RemoveContainer" containerID="7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.621497 4779 scope.go:117] "RemoveContainer" containerID="3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.646140 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.649077 4779 scope.go:117] "RemoveContainer" containerID="6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.656777 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x2fgq"] Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.693538 4779 scope.go:117] "RemoveContainer" containerID="7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050" Sep 29 20:00:31 crc kubenswrapper[4779]: E0929 20:00:31.693934 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050\": container with ID starting with 7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050 not found: ID does not exist" containerID="7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.693977 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050"} err="failed to get container status \"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050\": rpc error: code = NotFound desc = could not find container \"7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050\": container with ID starting with 7a1450ebfb283312b9cdda1332ab0b0b28fefeb643c12ff36b75a88b58f23050 not found: ID does not exist" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.694006 4779 scope.go:117] "RemoveContainer" containerID="3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84" Sep 29 20:00:31 crc kubenswrapper[4779]: E0929 20:00:31.694302 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84\": container with ID starting with 3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84 not found: ID does not exist" containerID="3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.694342 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84"} err="failed to get container status \"3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84\": rpc error: code = NotFound desc = could not find container \"3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84\": container with ID starting with 3eaf9d2b60002ac71aa9366c0a9b89e4ad60912100f880e5d88fe82b1116ce84 not found: ID does not exist" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.694359 4779 scope.go:117] "RemoveContainer" containerID="6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d" Sep 29 20:00:31 crc kubenswrapper[4779]: E0929 20:00:31.694641 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d\": container with ID starting with 6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d not found: ID does not exist" containerID="6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.694703 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d"} err="failed to get container status \"6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d\": rpc error: code = NotFound desc = could not find container \"6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d\": container with ID starting with 6e4004a1826ee96b69d898fa726d77f7ab6b428838e3bb2d2b6e1d82aea8df1d not found: ID does not exist" Sep 29 20:00:31 crc kubenswrapper[4779]: E0929 20:00:31.736512 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11639700_67d7_4d3d_abff_7186c1a98272.slice\": RecentStats: unable to find data in memory cache]" Sep 29 20:00:31 crc kubenswrapper[4779]: I0929 20:00:31.776229 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11639700-67d7-4d3d-abff-7186c1a98272" path="/var/lib/kubelet/pods/11639700-67d7-4d3d-abff-7186c1a98272/volumes" Sep 29 20:00:41 crc kubenswrapper[4779]: I0929 20:00:41.765914 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:00:41 crc kubenswrapper[4779]: E0929 20:00:41.766669 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:00:53 crc kubenswrapper[4779]: I0929 20:00:53.767380 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:00:53 crc kubenswrapper[4779]: E0929 20:00:53.768206 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.163377 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319601-sr9fp"] Sep 29 20:01:00 crc kubenswrapper[4779]: E0929 20:01:00.164562 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="extract-utilities" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.164589 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="extract-utilities" Sep 29 20:01:00 crc kubenswrapper[4779]: E0929 20:01:00.164626 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="extract-content" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.164639 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="extract-content" Sep 29 20:01:00 crc kubenswrapper[4779]: E0929 20:01:00.164678 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="registry-server" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.164691 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="registry-server" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.165075 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="11639700-67d7-4d3d-abff-7186c1a98272" containerName="registry-server" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.166052 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.187353 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319601-sr9fp"] Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.237522 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.237581 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n286w\" (UniqueName: \"kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.237793 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.237840 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.340991 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.341099 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.341308 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.341394 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n286w\" (UniqueName: \"kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.351258 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.353746 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.356639 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.365403 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n286w\" (UniqueName: \"kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w\") pod \"keystone-cron-29319601-sr9fp\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.499095 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:00 crc kubenswrapper[4779]: I0929 20:01:00.940392 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319601-sr9fp"] Sep 29 20:01:01 crc kubenswrapper[4779]: I0929 20:01:01.944032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-sr9fp" event={"ID":"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48","Type":"ContainerStarted","Data":"636e8b179ad6d3c92f3bf3d235ba9913beb3e1d46ca01660c54dc4e36c5226e9"} Sep 29 20:01:01 crc kubenswrapper[4779]: I0929 20:01:01.944604 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-sr9fp" event={"ID":"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48","Type":"ContainerStarted","Data":"a9f6091028dfb9e5e627fd0a848280a231c7a601a9f152aa9c90d17bbe829554"} Sep 29 20:01:01 crc kubenswrapper[4779]: I0929 20:01:01.975794 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319601-sr9fp" podStartSLOduration=1.975767319 podStartE2EDuration="1.975767319s" podCreationTimestamp="2025-09-29 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:01:01.975516992 +0000 UTC m=+3172.859942102" watchObservedRunningTime="2025-09-29 20:01:01.975767319 +0000 UTC m=+3172.860192459" Sep 29 20:01:03 crc kubenswrapper[4779]: I0929 20:01:03.968303 4779 generic.go:334] "Generic (PLEG): container finished" podID="329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" containerID="636e8b179ad6d3c92f3bf3d235ba9913beb3e1d46ca01660c54dc4e36c5226e9" exitCode=0 Sep 29 20:01:03 crc kubenswrapper[4779]: I0929 20:01:03.968432 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-sr9fp" event={"ID":"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48","Type":"ContainerDied","Data":"636e8b179ad6d3c92f3bf3d235ba9913beb3e1d46ca01660c54dc4e36c5226e9"} Sep 29 20:01:05 crc kubenswrapper[4779]: 
I0929 20:01:05.433071 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-sr9fp" Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.584250 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n286w\" (UniqueName: \"kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w\") pod \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.584377 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle\") pod \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.584434 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys\") pod \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.584503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data\") pod \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\" (UID: \"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48\") " Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.596372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" (UID: "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.596950 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w" (OuterVolumeSpecName: "kube-api-access-n286w") pod "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" (UID: "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48"). InnerVolumeSpecName "kube-api-access-n286w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.637795 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" (UID: "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.656395 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data" (OuterVolumeSpecName: "config-data") pod "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" (UID: "329c65e1-c7ba-4829-9ba7-6cbaf2e93d48"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.686866 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n286w\" (UniqueName: \"kubernetes.io/projected/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-kube-api-access-n286w\") on node \"crc\" DevicePath \"\""
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.686902 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.686915 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.686928 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329c65e1-c7ba-4829-9ba7-6cbaf2e93d48-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.998250 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319601-sr9fp" event={"ID":"329c65e1-c7ba-4829-9ba7-6cbaf2e93d48","Type":"ContainerDied","Data":"a9f6091028dfb9e5e627fd0a848280a231c7a601a9f152aa9c90d17bbe829554"}
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.998289 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9f6091028dfb9e5e627fd0a848280a231c7a601a9f152aa9c90d17bbe829554"
Sep 29 20:01:05 crc kubenswrapper[4779]: I0929 20:01:05.998386 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319601-sr9fp"
Sep 29 20:01:06 crc kubenswrapper[4779]: I0929 20:01:06.766465 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:01:06 crc kubenswrapper[4779]: E0929 20:01:06.767486 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:01:21 crc kubenswrapper[4779]: I0929 20:01:21.766421 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:01:21 crc kubenswrapper[4779]: E0929 20:01:21.767522 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:01:35 crc kubenswrapper[4779]: I0929 20:01:35.766258 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:01:35 crc kubenswrapper[4779]: E0929 20:01:35.766947 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
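The "Container not found in pod's containers" line above, like the rpc NotFound errors logged earlier for the redhat-operators and community-operators pods, is a benign race: the container is already gone by the time a status or delete call reaches the runtime, and the kubelet records the error and moves on. What makes this safe is treating "already absent" as success, sketched here with a hypothetical remove callback and a stand-in sentinel for the runtime's NotFound status:

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("not found") // stand-in for rpc code = NotFound

    // removeContainer retries cleanly because a container that is already
    // gone counts as removed.
    func removeContainer(id string, remove func(string) error) error {
        if err := remove(id); err != nil && !errors.Is(err, errNotFound) {
            return err // a real failure
        }
        return nil
    }

    func main() {
        err := removeContainer("9a303eee", func(string) error { return errNotFound })
        fmt.Println("removed cleanly:", err == nil) // true
    }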
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:01:47 crc kubenswrapper[4779]: I0929 20:01:47.766013 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:01:47 crc kubenswrapper[4779]: E0929 20:01:47.766748 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:02:00 crc kubenswrapper[4779]: I0929 20:02:00.766935 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:02:00 crc kubenswrapper[4779]: E0929 20:02:00.768155 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:02:14 crc kubenswrapper[4779]: I0929 20:02:14.766608 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:02:14 crc kubenswrapper[4779]: E0929 20:02:14.769418 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:02:26 crc kubenswrapper[4779]: I0929 20:02:26.766792 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:02:26 crc kubenswrapper[4779]: E0929 20:02:26.768235 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:02:40 crc kubenswrapper[4779]: I0929 20:02:40.768848 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a" Sep 29 20:02:40 crc kubenswrapper[4779]: E0929 20:02:40.770756 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.637633 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"] Sep 29 20:02:42 crc kubenswrapper[4779]: E0929 20:02:42.638439 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" containerName="keystone-cron" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.638457 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" containerName="keystone-cron" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.638726 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="329c65e1-c7ba-4829-9ba7-6cbaf2e93d48" containerName="keystone-cron" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.642731 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.663120 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"] Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.727702 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shqc2\" (UniqueName: \"kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.728025 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.728052 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.829457 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shqc2\" (UniqueName: \"kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.829532 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.829571 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.830002 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.830074 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.849353 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shqc2\" (UniqueName: \"kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2\") pod \"redhat-marketplace-k882r\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") " pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:42 crc kubenswrapper[4779]: I0929 20:02:42.989409 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k882r" Sep 29 20:02:43 crc kubenswrapper[4779]: I0929 20:02:43.439551 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"] Sep 29 20:02:44 crc kubenswrapper[4779]: I0929 20:02:44.220497 4779 generic.go:334] "Generic (PLEG): container finished" podID="6f3232d5-9535-4947-a06d-c47953e9a631" containerID="02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9" exitCode=0 Sep 29 20:02:44 crc kubenswrapper[4779]: I0929 20:02:44.220603 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerDied","Data":"02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9"} Sep 29 20:02:44 crc kubenswrapper[4779]: I0929 20:02:44.220778 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerStarted","Data":"2993298dee41a0618d6a84ab40f455ddeffdc5ff80f9928122a0a7c82fabc5a0"} Sep 29 20:02:46 crc kubenswrapper[4779]: I0929 20:02:46.241754 4779 generic.go:334] "Generic (PLEG): container finished" podID="6f3232d5-9535-4947-a06d-c47953e9a631" containerID="440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d" exitCode=0 Sep 29 20:02:46 crc kubenswrapper[4779]: I0929 20:02:46.241870 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerDied","Data":"440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d"} Sep 29 20:02:47 crc kubenswrapper[4779]: I0929 20:02:47.263772 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerStarted","Data":"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"} Sep 29 
Sep 29 20:02:47 crc kubenswrapper[4779]: I0929 20:02:47.284574 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k882r" podStartSLOduration=2.883136698 podStartE2EDuration="5.284555468s" podCreationTimestamp="2025-09-29 20:02:42 +0000 UTC" firstStartedPulling="2025-09-29 20:02:44.223408348 +0000 UTC m=+3275.107833448" lastFinishedPulling="2025-09-29 20:02:46.624827118 +0000 UTC m=+3277.509252218" observedRunningTime="2025-09-29 20:02:47.282036769 +0000 UTC m=+3278.166461869" watchObservedRunningTime="2025-09-29 20:02:47.284555468 +0000 UTC m=+3278.168980578"
Sep 29 20:02:52 crc kubenswrapper[4779]: I0929 20:02:52.990042 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:52 crc kubenswrapper[4779]: I0929 20:02:52.990560 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:53 crc kubenswrapper[4779]: I0929 20:02:53.043964 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:53 crc kubenswrapper[4779]: I0929 20:02:53.383032 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:53 crc kubenswrapper[4779]: I0929 20:02:53.443153 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"]
Sep 29 20:02:55 crc kubenswrapper[4779]: I0929 20:02:55.346256 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k882r" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="registry-server" containerID="cri-o://bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab" gracePeriod=2
Sep 29 20:02:55 crc kubenswrapper[4779]: I0929 20:02:55.767385 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:02:55 crc kubenswrapper[4779]: E0929 20:02:55.768120 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:02:55 crc kubenswrapper[4779]: I0929 20:02:55.962477 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.097211 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shqc2\" (UniqueName: \"kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2\") pod \"6f3232d5-9535-4947-a06d-c47953e9a631\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") "
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.097294 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content\") pod \"6f3232d5-9535-4947-a06d-c47953e9a631\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") "
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.097612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities\") pod \"6f3232d5-9535-4947-a06d-c47953e9a631\" (UID: \"6f3232d5-9535-4947-a06d-c47953e9a631\") "
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.099117 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities" (OuterVolumeSpecName: "utilities") pod "6f3232d5-9535-4947-a06d-c47953e9a631" (UID: "6f3232d5-9535-4947-a06d-c47953e9a631"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.106008 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2" (OuterVolumeSpecName: "kube-api-access-shqc2") pod "6f3232d5-9535-4947-a06d-c47953e9a631" (UID: "6f3232d5-9535-4947-a06d-c47953e9a631"). InnerVolumeSpecName "kube-api-access-shqc2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.112628 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f3232d5-9535-4947-a06d-c47953e9a631" (UID: "6f3232d5-9535-4947-a06d-c47953e9a631"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.200529 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shqc2\" (UniqueName: \"kubernetes.io/projected/6f3232d5-9535-4947-a06d-c47953e9a631-kube-api-access-shqc2\") on node \"crc\" DevicePath \"\""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.200929 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.201755 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f3232d5-9535-4947-a06d-c47953e9a631-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.362653 4779 generic.go:334] "Generic (PLEG): container finished" podID="6f3232d5-9535-4947-a06d-c47953e9a631" containerID="bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab" exitCode=0
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.362740 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k882r"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.362778 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerDied","Data":"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"}
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.363436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k882r" event={"ID":"6f3232d5-9535-4947-a06d-c47953e9a631","Type":"ContainerDied","Data":"2993298dee41a0618d6a84ab40f455ddeffdc5ff80f9928122a0a7c82fabc5a0"}
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.363498 4779 scope.go:117] "RemoveContainer" containerID="bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.413068 4779 scope.go:117] "RemoveContainer" containerID="440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.413559 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"]
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.422879 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k882r"]
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.456134 4779 scope.go:117] "RemoveContainer" containerID="02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.512755 4779 scope.go:117] "RemoveContainer" containerID="bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"
Sep 29 20:02:56 crc kubenswrapper[4779]: E0929 20:02:56.513150 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab\": container with ID starting with bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab not found: ID does not exist" containerID="bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.513188 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab"} err="failed to get container status \"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab\": rpc error: code = NotFound desc = could not find container \"bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab\": container with ID starting with bf5fd31be0a2578db7d163bdc35c9c248aab40363d4505db2642323ac4f4b9ab not found: ID does not exist"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.513212 4779 scope.go:117] "RemoveContainer" containerID="440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d"
Sep 29 20:02:56 crc kubenswrapper[4779]: E0929 20:02:56.513603 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d\": container with ID starting with 440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d not found: ID does not exist" containerID="440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.513658 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d"} err="failed to get container status \"440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d\": rpc error: code = NotFound desc = could not find container \"440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d\": container with ID starting with 440c48c0a5665daa9cb95b032484aa28df7ff9f3a43127cac42bb32d058b791d not found: ID does not exist"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.513678 4779 scope.go:117] "RemoveContainer" containerID="02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9"
Sep 29 20:02:56 crc kubenswrapper[4779]: E0929 20:02:56.514016 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9\": container with ID starting with 02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9 not found: ID does not exist" containerID="02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9"
Sep 29 20:02:56 crc kubenswrapper[4779]: I0929 20:02:56.514043 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9"} err="failed to get container status \"02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9\": rpc error: code = NotFound desc = could not find container \"02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9\": container with ID starting with 02d89c96d3a9691d66323c3b21f18e83786dd1f3464f5e52504b664b79683ba9 not found: ID does not exist"
Sep 29 20:02:57 crc kubenswrapper[4779]: I0929 20:02:57.778083 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" path="/var/lib/kubelet/pods/6f3232d5-9535-4947-a06d-c47953e9a631/volumes"
Sep 29 20:03:06 crc kubenswrapper[4779]: I0929 20:03:06.766501 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:03:06 crc kubenswrapper[4779]: E0929 20:03:06.767563 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:03:18 crc kubenswrapper[4779]: I0929 20:03:18.766453 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:03:18 crc kubenswrapper[4779]: E0929 20:03:18.767504 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:03:32 crc kubenswrapper[4779]: I0929 20:03:32.767044 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:03:32 crc kubenswrapper[4779]: E0929 20:03:32.768103 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:03:46 crc kubenswrapper[4779]: I0929 20:03:46.766803 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:03:46 crc kubenswrapper[4779]: E0929 20:03:46.767804 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:04:00 crc kubenswrapper[4779]: I0929 20:04:00.767474 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:04:00 crc kubenswrapper[4779]: E0929 20:04:00.768722 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:04:12 crc kubenswrapper[4779]: I0929 20:04:12.765879 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:04:12 crc kubenswrapper[4779]: E0929 20:04:12.766754 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:04:25 crc kubenswrapper[4779]: I0929 20:04:25.767265 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:04:25 crc kubenswrapper[4779]: E0929 20:04:25.768302 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:04:36 crc kubenswrapper[4779]: I0929 20:04:36.766821 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:04:36 crc kubenswrapper[4779]: E0929 20:04:36.768358 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:04:49 crc kubenswrapper[4779]: I0929 20:04:49.790695 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:04:49 crc kubenswrapper[4779]: E0929 20:04:49.791591 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:05:02 crc kubenswrapper[4779]: I0929 20:05:02.767046 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:05:02 crc kubenswrapper[4779]: E0929 20:05:02.768228 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:05:13 crc kubenswrapper[4779]: I0929 20:05:13.766932 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:05:13 crc kubenswrapper[4779]: E0929 20:05:13.767847 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19"
Sep 29 20:05:28 crc kubenswrapper[4779]: I0929 20:05:28.766856 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:05:30 crc kubenswrapper[4779]: I0929 20:05:30.037946 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23"}
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.603085 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"]
Sep 29 20:05:33 crc kubenswrapper[4779]: E0929 20:05:33.604092 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="registry-server"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.604107 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="registry-server"
Sep 29 20:05:33 crc kubenswrapper[4779]: E0929 20:05:33.604124 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="extract-utilities"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.604132 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="extract-utilities"
Sep 29 20:05:33 crc kubenswrapper[4779]: E0929 20:05:33.604176 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="extract-content"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.604184 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="extract-content"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.604412 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f3232d5-9535-4947-a06d-c47953e9a631" containerName="registry-server"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.606036 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.625528 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"]
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.663665 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdfkd\" (UniqueName: \"kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.663710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.663824 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.765836 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdfkd\" (UniqueName: \"kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.766352 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.766910 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.767057 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.767544 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.791951 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdfkd\" (UniqueName: \"kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd\") pod \"certified-operators-9bvlk\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:33 crc kubenswrapper[4779]: I0929 20:05:33.967744 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:34 crc kubenswrapper[4779]: I0929 20:05:34.450224 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"]
Sep 29 20:05:35 crc kubenswrapper[4779]: I0929 20:05:35.114715 4779 generic.go:334] "Generic (PLEG): container finished" podID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerID="21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0" exitCode=0
Sep 29 20:05:35 crc kubenswrapper[4779]: I0929 20:05:35.114790 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerDied","Data":"21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0"}
Sep 29 20:05:35 crc kubenswrapper[4779]: I0929 20:05:35.114833 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerStarted","Data":"288e00b07af7bf34048015c9d963302c7fd5b056a5df5aa8a59a27e0e60693b5"}
Sep 29 20:05:35 crc kubenswrapper[4779]: I0929 20:05:35.117969 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 20:05:36 crc kubenswrapper[4779]: I0929 20:05:36.137696 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerStarted","Data":"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254"}
Sep 29 20:05:37 crc kubenswrapper[4779]: I0929 20:05:37.152531 4779 generic.go:334] "Generic (PLEG): container finished" podID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerID="264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254" exitCode=0
Sep 29 20:05:37 crc kubenswrapper[4779]: I0929 20:05:37.152611 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerDied","Data":"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254"}
Sep 29 20:05:38 crc kubenswrapper[4779]: I0929 20:05:38.163539 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerStarted","Data":"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e"}
Sep 29 20:05:38 crc kubenswrapper[4779]: I0929 20:05:38.192997 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9bvlk" podStartSLOduration=2.7106798899999998 podStartE2EDuration="5.192973129s" podCreationTimestamp="2025-09-29 20:05:33 +0000 UTC" firstStartedPulling="2025-09-29 20:05:35.11758722 +0000 UTC m=+3446.002012350" lastFinishedPulling="2025-09-29 20:05:37.599880469 +0000 UTC m=+3448.484305589" observedRunningTime="2025-09-29 20:05:38.184905808 +0000 UTC m=+3449.069330918" watchObservedRunningTime="2025-09-29 20:05:38.192973129 +0000 UTC m=+3449.077398249"
Sep 29 20:05:43 crc kubenswrapper[4779]: I0929 20:05:43.968473 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:43 crc kubenswrapper[4779]: I0929 20:05:43.970169 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:44 crc kubenswrapper[4779]: I0929 20:05:44.038245 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:44 crc kubenswrapper[4779]: I0929 20:05:44.296168 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:44 crc kubenswrapper[4779]: I0929 20:05:44.358883 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"]
Sep 29 20:05:46 crc kubenswrapper[4779]: I0929 20:05:46.264757 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9bvlk" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="registry-server" containerID="cri-o://efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e" gracePeriod=2
Sep 29 20:05:46 crc kubenswrapper[4779]: I0929 20:05:46.884639 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9bvlk"
Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.058099 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content\") pod \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") "
Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.058461 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities\") pod \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") "
Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.059602 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities" (OuterVolumeSpecName: "utilities") pod "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" (UID: "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.059630 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdfkd\" (UniqueName: \"kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd\") pod \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\" (UID: \"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4\") " Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.061247 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.069259 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd" (OuterVolumeSpecName: "kube-api-access-cdfkd") pod "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" (UID: "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4"). InnerVolumeSpecName "kube-api-access-cdfkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.163742 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdfkd\" (UniqueName: \"kubernetes.io/projected/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-kube-api-access-cdfkd\") on node \"crc\" DevicePath \"\"" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.173455 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" (UID: "dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.266079 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.277845 4779 generic.go:334] "Generic (PLEG): container finished" podID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerID="efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e" exitCode=0 Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.277931 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerDied","Data":"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e"} Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.277986 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9bvlk" event={"ID":"dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4","Type":"ContainerDied","Data":"288e00b07af7bf34048015c9d963302c7fd5b056a5df5aa8a59a27e0e60693b5"} Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.278005 4779 scope.go:117] "RemoveContainer" containerID="efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.277914 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9bvlk" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.306694 4779 scope.go:117] "RemoveContainer" containerID="264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.337849 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"] Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.349002 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9bvlk"] Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.366802 4779 scope.go:117] "RemoveContainer" containerID="21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.403377 4779 scope.go:117] "RemoveContainer" containerID="efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e" Sep 29 20:05:47 crc kubenswrapper[4779]: E0929 20:05:47.403927 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e\": container with ID starting with efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e not found: ID does not exist" containerID="efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.404037 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e"} err="failed to get container status \"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e\": rpc error: code = NotFound desc = could not find container \"efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e\": container with ID starting with efe94a810fa73c787d471f778fdb9db1f52ecbf6de6c438fbf48eefb8d45c94e not found: ID does not exist" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.404125 4779 scope.go:117] "RemoveContainer" containerID="264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254" Sep 29 20:05:47 crc kubenswrapper[4779]: E0929 20:05:47.404648 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254\": container with ID starting with 264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254 not found: ID does not exist" containerID="264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.404702 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254"} err="failed to get container status \"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254\": rpc error: code = NotFound desc = could not find container \"264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254\": container with ID starting with 264b7c72ef9305fed0b1c4ab8b1c7726c7f6295cd47fcae5d7b2f088ae614254 not found: ID does not exist" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.404736 4779 scope.go:117] "RemoveContainer" containerID="21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0" Sep 29 20:05:47 crc kubenswrapper[4779]: E0929 20:05:47.405234 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0\": container with ID starting with 21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0 not found: ID does not exist" containerID="21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.405272 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0"} err="failed to get container status \"21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0\": rpc error: code = NotFound desc = could not find container \"21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0\": container with ID starting with 21a4ce6a71931e7d01d9e289c1f2998176596fe5d775a70f533461ba327ad2c0 not found: ID does not exist" Sep 29 20:05:47 crc kubenswrapper[4779]: I0929 20:05:47.780584 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" path="/var/lib/kubelet/pods/dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4/volumes" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.136872 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:05:54 crc kubenswrapper[4779]: E0929 20:05:54.137688 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="extract-content" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.137714 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="extract-content" Sep 29 20:05:54 crc kubenswrapper[4779]: E0929 20:05:54.137725 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="extract-utilities" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.137734 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="extract-utilities" Sep 29 20:05:54 crc kubenswrapper[4779]: E0929 20:05:54.137741 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="registry-server" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.137748 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="registry-server" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.137912 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbb7e8f2-3a3f-40bd-9e5f-d2c09e3cbfd4" containerName="registry-server" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.139216 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.152131 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.312710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.312960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97cqf\" (UniqueName: \"kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.313299 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.414936 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.415024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.415084 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97cqf\" (UniqueName: \"kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.415545 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.415691 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.438189 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-97cqf\" (UniqueName: \"kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf\") pod \"redhat-operators-qx54v\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.478747 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:05:54 crc kubenswrapper[4779]: I0929 20:05:54.926685 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:05:54 crc kubenswrapper[4779]: W0929 20:05:54.944277 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod080d2544_78d8_4546_9e63_f251dd07d1bc.slice/crio-3a31f95b597fcc4802d3bd43ef2af8c5b77746bb30f72a479a1d555dea924f62 WatchSource:0}: Error finding container 3a31f95b597fcc4802d3bd43ef2af8c5b77746bb30f72a479a1d555dea924f62: Status 404 returned error can't find the container with id 3a31f95b597fcc4802d3bd43ef2af8c5b77746bb30f72a479a1d555dea924f62 Sep 29 20:05:55 crc kubenswrapper[4779]: I0929 20:05:55.364389 4779 generic.go:334] "Generic (PLEG): container finished" podID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerID="7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c" exitCode=0 Sep 29 20:05:55 crc kubenswrapper[4779]: I0929 20:05:55.364454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerDied","Data":"7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c"} Sep 29 20:05:55 crc kubenswrapper[4779]: I0929 20:05:55.364660 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerStarted","Data":"3a31f95b597fcc4802d3bd43ef2af8c5b77746bb30f72a479a1d555dea924f62"} Sep 29 20:05:56 crc kubenswrapper[4779]: I0929 20:05:56.379608 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerStarted","Data":"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a"} Sep 29 20:05:58 crc kubenswrapper[4779]: I0929 20:05:58.424761 4779 generic.go:334] "Generic (PLEG): container finished" podID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerID="0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a" exitCode=0 Sep 29 20:05:58 crc kubenswrapper[4779]: I0929 20:05:58.424843 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerDied","Data":"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a"} Sep 29 20:05:59 crc kubenswrapper[4779]: I0929 20:05:59.448210 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerStarted","Data":"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc"} Sep 29 20:05:59 crc kubenswrapper[4779]: I0929 20:05:59.483743 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qx54v" podStartSLOduration=1.924996022 podStartE2EDuration="5.483637441s" 
podCreationTimestamp="2025-09-29 20:05:54 +0000 UTC" firstStartedPulling="2025-09-29 20:05:55.366280281 +0000 UTC m=+3466.250705381" lastFinishedPulling="2025-09-29 20:05:58.92492169 +0000 UTC m=+3469.809346800" observedRunningTime="2025-09-29 20:05:59.468465986 +0000 UTC m=+3470.352891116" watchObservedRunningTime="2025-09-29 20:05:59.483637441 +0000 UTC m=+3470.368062561" Sep 29 20:06:04 crc kubenswrapper[4779]: I0929 20:06:04.479274 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:04 crc kubenswrapper[4779]: I0929 20:06:04.479976 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:05 crc kubenswrapper[4779]: I0929 20:06:05.550290 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qx54v" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="registry-server" probeResult="failure" output=< Sep 29 20:06:05 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 20:06:05 crc kubenswrapper[4779]: > Sep 29 20:06:13 crc kubenswrapper[4779]: I0929 20:06:13.590550 4779 generic.go:334] "Generic (PLEG): container finished" podID="8bf09edd-ad1f-4883-ade0-8082b2055f60" containerID="bf9dd8426e4e5b7efb6e15d9e65d28271c2cfeccddbf35f0cbb187d353b9ecd0" exitCode=0 Sep 29 20:06:13 crc kubenswrapper[4779]: I0929 20:06:13.590654 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8bf09edd-ad1f-4883-ade0-8082b2055f60","Type":"ContainerDied","Data":"bf9dd8426e4e5b7efb6e15d9e65d28271c2cfeccddbf35f0cbb187d353b9ecd0"} Sep 29 20:06:14 crc kubenswrapper[4779]: I0929 20:06:14.585201 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:14 crc kubenswrapper[4779]: I0929 20:06:14.658152 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:14 crc kubenswrapper[4779]: I0929 20:06:14.832229 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.043126 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.063276 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.063805 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.063873 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.063894 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.063960 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nljkg\" (UniqueName: \"kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.064008 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.064028 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.064049 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.064114 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data\") pod \"8bf09edd-ad1f-4883-ade0-8082b2055f60\" (UID: \"8bf09edd-ad1f-4883-ade0-8082b2055f60\") " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.064926 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data" (OuterVolumeSpecName: "config-data") pod 
"8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.067291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.072358 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.077682 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.090258 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg" (OuterVolumeSpecName: "kube-api-access-nljkg") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "kube-api-access-nljkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.121924 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.125978 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.134948 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.139699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8bf09edd-ad1f-4883-ade0-8082b2055f60" (UID: "8bf09edd-ad1f-4883-ade0-8082b2055f60"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.165929 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.165996 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166012 4779 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166028 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166041 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nljkg\" (UniqueName: \"kubernetes.io/projected/8bf09edd-ad1f-4883-ade0-8082b2055f60-kube-api-access-nljkg\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166055 4779 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8bf09edd-ad1f-4883-ade0-8082b2055f60-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166067 4779 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166081 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bf09edd-ad1f-4883-ade0-8082b2055f60-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.166093 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bf09edd-ad1f-4883-ade0-8082b2055f60-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.197480 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.267425 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.617204 4779 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qx54v" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="registry-server" containerID="cri-o://19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc" gracePeriod=2 Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.617551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8bf09edd-ad1f-4883-ade0-8082b2055f60","Type":"ContainerDied","Data":"7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd"} Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.618118 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f479f4c94155f7586cfbb03066fb739e46acfdbd771b9c241a12d09914a9dfd" Sep 29 20:06:15 crc kubenswrapper[4779]: I0929 20:06:15.617606 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.130095 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.185534 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97cqf\" (UniqueName: \"kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf\") pod \"080d2544-78d8-4546-9e63-f251dd07d1bc\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.185718 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities\") pod \"080d2544-78d8-4546-9e63-f251dd07d1bc\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.185913 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content\") pod \"080d2544-78d8-4546-9e63-f251dd07d1bc\" (UID: \"080d2544-78d8-4546-9e63-f251dd07d1bc\") " Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.186769 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities" (OuterVolumeSpecName: "utilities") pod "080d2544-78d8-4546-9e63-f251dd07d1bc" (UID: "080d2544-78d8-4546-9e63-f251dd07d1bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.187011 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.190193 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf" (OuterVolumeSpecName: "kube-api-access-97cqf") pod "080d2544-78d8-4546-9e63-f251dd07d1bc" (UID: "080d2544-78d8-4546-9e63-f251dd07d1bc"). InnerVolumeSpecName "kube-api-access-97cqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.288757 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97cqf\" (UniqueName: \"kubernetes.io/projected/080d2544-78d8-4546-9e63-f251dd07d1bc-kube-api-access-97cqf\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.296023 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "080d2544-78d8-4546-9e63-f251dd07d1bc" (UID: "080d2544-78d8-4546-9e63-f251dd07d1bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.390839 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080d2544-78d8-4546-9e63-f251dd07d1bc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.630126 4779 generic.go:334] "Generic (PLEG): container finished" podID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerID="19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc" exitCode=0 Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.630172 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerDied","Data":"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc"} Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.630198 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx54v" event={"ID":"080d2544-78d8-4546-9e63-f251dd07d1bc","Type":"ContainerDied","Data":"3a31f95b597fcc4802d3bd43ef2af8c5b77746bb30f72a479a1d555dea924f62"} Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.630217 4779 scope.go:117] "RemoveContainer" containerID="19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.630640 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qx54v" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.659670 4779 scope.go:117] "RemoveContainer" containerID="0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.667061 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.681712 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qx54v"] Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.686763 4779 scope.go:117] "RemoveContainer" containerID="7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.730937 4779 scope.go:117] "RemoveContainer" containerID="19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc" Sep 29 20:06:16 crc kubenswrapper[4779]: E0929 20:06:16.731464 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc\": container with ID starting with 19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc not found: ID does not exist" containerID="19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.731551 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc"} err="failed to get container status \"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc\": rpc error: code = NotFound desc = could not find container \"19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc\": container with ID starting with 19f2dfa535f55152f77ba4592b5826c8f17609186f63e8e0af8d966a87495bfc not found: ID does not exist" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.731628 4779 scope.go:117] "RemoveContainer" containerID="0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a" Sep 29 20:06:16 crc kubenswrapper[4779]: E0929 20:06:16.731917 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a\": container with ID starting with 0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a not found: ID does not exist" containerID="0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.732003 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a"} err="failed to get container status \"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a\": rpc error: code = NotFound desc = could not find container \"0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a\": container with ID starting with 0e2916f34f2dbb423872a9b310dc65e0ee0d1a75f96c0f1e510fe284ca28a89a not found: ID does not exist" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.732086 4779 scope.go:117] "RemoveContainer" containerID="7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c" Sep 29 20:06:16 crc kubenswrapper[4779]: E0929 20:06:16.732502 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c\": container with ID starting with 7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c not found: ID does not exist" containerID="7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c" Sep 29 20:06:16 crc kubenswrapper[4779]: I0929 20:06:16.732583 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c"} err="failed to get container status \"7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c\": rpc error: code = NotFound desc = could not find container \"7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c\": container with ID starting with 7016ac29db7d464f9f368f41bf91e799b3e51ad994f77754e481645e0a52802c not found: ID does not exist" Sep 29 20:06:17 crc kubenswrapper[4779]: I0929 20:06:17.793116 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" path="/var/lib/kubelet/pods/080d2544-78d8-4546-9e63-f251dd07d1bc/volumes" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.539237 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:06:21 crc kubenswrapper[4779]: E0929 20:06:21.540925 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="registry-server" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.540958 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="registry-server" Sep 29 20:06:21 crc kubenswrapper[4779]: E0929 20:06:21.541000 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf09edd-ad1f-4883-ade0-8082b2055f60" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.541021 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf09edd-ad1f-4883-ade0-8082b2055f60" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:06:21 crc kubenswrapper[4779]: E0929 20:06:21.541074 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="extract-content" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.541090 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="extract-content" Sep 29 20:06:21 crc kubenswrapper[4779]: E0929 20:06:21.541138 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="extract-utilities" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.541155 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="extract-utilities" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.542275 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf09edd-ad1f-4883-ade0-8082b2055f60" containerName="tempest-tests-tempest-tests-runner" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.542391 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="080d2544-78d8-4546-9e63-f251dd07d1bc" containerName="registry-server" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.543841 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.546829 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-27mvk" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.570993 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.594440 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.594498 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9lsw\" (UniqueName: \"kubernetes.io/projected/ab44e769-9e8e-4cec-8fa4-97f93b25dd8c-kube-api-access-g9lsw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.696605 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.696679 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9lsw\" (UniqueName: \"kubernetes.io/projected/ab44e769-9e8e-4cec-8fa4-97f93b25dd8c-kube-api-access-g9lsw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.697520 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.723131 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9lsw\" (UniqueName: \"kubernetes.io/projected/ab44e769-9e8e-4cec-8fa4-97f93b25dd8c-kube-api-access-g9lsw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc kubenswrapper[4779]: I0929 20:06:21.739524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:21 crc 
kubenswrapper[4779]: I0929 20:06:21.878027 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 20:06:22 crc kubenswrapper[4779]: I0929 20:06:22.338379 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 20:06:22 crc kubenswrapper[4779]: I0929 20:06:22.697286 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c","Type":"ContainerStarted","Data":"58d8ec413adb80156b82fb9f31d537c68c895c5de7cf6b18c1a81a65a21eddaa"} Sep 29 20:06:23 crc kubenswrapper[4779]: I0929 20:06:23.711565 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ab44e769-9e8e-4cec-8fa4-97f93b25dd8c","Type":"ContainerStarted","Data":"8c99158bc3555926ee9c390683fc4a349e50ffff537a7aa9b27c41be0fbe9fd2"} Sep 29 20:06:23 crc kubenswrapper[4779]: I0929 20:06:23.744455 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.9772987450000001 podStartE2EDuration="2.744428997s" podCreationTimestamp="2025-09-29 20:06:21 +0000 UTC" firstStartedPulling="2025-09-29 20:06:22.338419295 +0000 UTC m=+3493.222844425" lastFinishedPulling="2025-09-29 20:06:23.105549577 +0000 UTC m=+3493.989974677" observedRunningTime="2025-09-29 20:06:23.736514801 +0000 UTC m=+3494.620939941" watchObservedRunningTime="2025-09-29 20:06:23.744428997 +0000 UTC m=+3494.628854127" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.174423 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jnvd9/must-gather-44n2z"] Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.176852 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.184483 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jnvd9"/"default-dockercfg-d8pnh" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.184511 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jnvd9"/"kube-root-ca.crt" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.184517 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jnvd9"/"openshift-service-ca.crt" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.184609 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jnvd9/must-gather-44n2z"] Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.228361 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mlz5\" (UniqueName: \"kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.228449 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.330038 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.330175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mlz5\" (UniqueName: \"kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.331029 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.369884 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mlz5\" (UniqueName: \"kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5\") pod \"must-gather-44n2z\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.496599 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:06:41 crc kubenswrapper[4779]: I0929 20:06:41.948895 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jnvd9/must-gather-44n2z"] Sep 29 20:06:41 crc kubenswrapper[4779]: W0929 20:06:41.953663 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4636abcd_6c3b_451c_be55_f51d93252d3f.slice/crio-a0c90844a776598819a5e0f6eb1053562a845a3b59aedaf34f151577a3a931a3 WatchSource:0}: Error finding container a0c90844a776598819a5e0f6eb1053562a845a3b59aedaf34f151577a3a931a3: Status 404 returned error can't find the container with id a0c90844a776598819a5e0f6eb1053562a845a3b59aedaf34f151577a3a931a3 Sep 29 20:06:42 crc kubenswrapper[4779]: I0929 20:06:42.918786 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/must-gather-44n2z" event={"ID":"4636abcd-6c3b-451c-be55-f51d93252d3f","Type":"ContainerStarted","Data":"a0c90844a776598819a5e0f6eb1053562a845a3b59aedaf34f151577a3a931a3"} Sep 29 20:06:46 crc kubenswrapper[4779]: I0929 20:06:46.984922 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/must-gather-44n2z" event={"ID":"4636abcd-6c3b-451c-be55-f51d93252d3f","Type":"ContainerStarted","Data":"902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f"} Sep 29 20:06:46 crc kubenswrapper[4779]: I0929 20:06:46.985723 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/must-gather-44n2z" event={"ID":"4636abcd-6c3b-451c-be55-f51d93252d3f","Type":"ContainerStarted","Data":"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f"} Sep 29 20:06:47 crc kubenswrapper[4779]: I0929 20:06:47.012333 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jnvd9/must-gather-44n2z" podStartSLOduration=2.212610101 podStartE2EDuration="6.012295153s" podCreationTimestamp="2025-09-29 20:06:41 +0000 UTC" firstStartedPulling="2025-09-29 20:06:41.966666489 +0000 UTC m=+3512.851091589" lastFinishedPulling="2025-09-29 20:06:45.766351501 +0000 UTC m=+3516.650776641" observedRunningTime="2025-09-29 20:06:47.002241218 +0000 UTC m=+3517.886666328" watchObservedRunningTime="2025-09-29 20:06:47.012295153 +0000 UTC m=+3517.896720253" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.502225 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-k6p64"] Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.505255 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.592388 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.592723 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb88f\" (UniqueName: \"kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.694462 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.694527 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb88f\" (UniqueName: \"kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.694642 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.713967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb88f\" (UniqueName: \"kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f\") pod \"crc-debug-k6p64\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") " pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:49 crc kubenswrapper[4779]: I0929 20:06:49.825735 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" Sep 29 20:06:50 crc kubenswrapper[4779]: I0929 20:06:50.010624 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" event={"ID":"3e68ecff-bd00-4be9-a41b-ef164bf13041","Type":"ContainerStarted","Data":"3f0d5b48df8ee5bbb6c9d8b6ebf872c9b265e7457888c27eea156e052dc6f522"} Sep 29 20:07:01 crc kubenswrapper[4779]: I0929 20:07:01.125389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" event={"ID":"3e68ecff-bd00-4be9-a41b-ef164bf13041","Type":"ContainerStarted","Data":"2b1b5e83d4e4b3f186a56de03b4f18e1eeee2f49cc9fb0ca8a29c7efd2a66a7c"} Sep 29 20:07:01 crc kubenswrapper[4779]: I0929 20:07:01.143976 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" podStartSLOduration=1.861300962 podStartE2EDuration="12.143959397s" podCreationTimestamp="2025-09-29 20:06:49 +0000 UTC" firstStartedPulling="2025-09-29 20:06:49.871337953 +0000 UTC m=+3520.755763053" lastFinishedPulling="2025-09-29 20:07:00.153996388 +0000 UTC m=+3531.038421488" observedRunningTime="2025-09-29 20:07:01.138246051 +0000 UTC m=+3532.022671161" watchObservedRunningTime="2025-09-29 20:07:01.143959397 +0000 UTC m=+3532.028384497" Sep 29 20:07:43 crc kubenswrapper[4779]: I0929 20:07:43.784919 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:07:43 crc kubenswrapper[4779]: I0929 20:07:43.785525 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:07:48 crc kubenswrapper[4779]: I0929 20:07:48.733191 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f48875c8b-24729_06187c54-071a-4a20-adc1-84627f949933/barbican-api-log/0.log" Sep 29 20:07:48 crc kubenswrapper[4779]: I0929 20:07:48.756328 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f48875c8b-24729_06187c54-071a-4a20-adc1-84627f949933/barbican-api/0.log" Sep 29 20:07:48 crc kubenswrapper[4779]: I0929 20:07:48.929537 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58f6cf64bb-t8fdp_d3365fba-7e29-4f75-aa74-67ffd7275a15/barbican-keystone-listener/0.log" Sep 29 20:07:48 crc kubenswrapper[4779]: I0929 20:07:48.972859 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58f6cf64bb-t8fdp_d3365fba-7e29-4f75-aa74-67ffd7275a15/barbican-keystone-listener-log/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.155000 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7bd9b84c75-dphls_7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b/barbican-worker/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.208863 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7bd9b84c75-dphls_7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b/barbican-worker-log/0.log" Sep 29 20:07:49 crc 
kubenswrapper[4779]: I0929 20:07:49.408122 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs_8beecba1-5edc-4f95-a9ad-49889c62c0ae/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.597840 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/ceilometer-central-agent/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.634115 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/ceilometer-notification-agent/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.677310 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/proxy-httpd/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.781737 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/sg-core/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.922105 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e063f6a5-dcd3-413d-bb65-e9ceeca73df0/cinder-api/0.log" Sep 29 20:07:49 crc kubenswrapper[4779]: I0929 20:07:49.981285 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e063f6a5-dcd3-413d-bb65-e9ceeca73df0/cinder-api-log/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.137973 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a6f49188-efdd-4f27-ad02-4656f2cf5d11/cinder-scheduler/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.201070 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a6f49188-efdd-4f27-ad02-4656f2cf5d11/probe/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.338002 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-c66cd_02971c6b-be51-4634-b3a0-661125814bea/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.588260 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6_e2235a29-5c01-4d29-a4cb-97f0abe8ca63/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.642639 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/init/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.869955 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/dnsmasq-dns/0.log" Sep 29 20:07:50 crc kubenswrapper[4779]: I0929 20:07:50.875429 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/init/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.072740 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_fed19b9e-ec0b-4944-a98c-885ec4862d48/glance-httpd/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.091844 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw_b2f2a76e-5c5c-4708-bc75-12909e8859fc/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.262142 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_fed19b9e-ec0b-4944-a98c-885ec4862d48/glance-log/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.305122 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_880c493a-a9b5-4cdc-a4b1-256feeee3e1b/glance-httpd/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.452948 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_880c493a-a9b5-4cdc-a4b1-256feeee3e1b/glance-log/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.669377 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fc6fd7df6-btpzz_6cd722c9-4e9b-4bad-a9fd-84529803680b/horizon/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.879729 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg_d00741d2-40ff-4d5c-b697-cc4ac7ed7511/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:51 crc kubenswrapper[4779]: I0929 20:07:51.968470 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fc6fd7df6-btpzz_6cd722c9-4e9b-4bad-a9fd-84529803680b/horizon-log/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.107287 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-lc87v_0651501f-91fe-410c-9e0c-d1e49760bedd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.280660 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6d4574fd6f-56jht_413037bd-ba8f-4874-a915-1c77426d689b/keystone-api/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.347369 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319601-sr9fp_329c65e1-c7ba-4829-9ba7-6cbaf2e93d48/keystone-cron/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.434243 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_43945a37-324b-4e37-a960-d92da8f5e56f/kube-state-metrics/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.577245 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z_7ff61060-93f6-4bd6-a6f9-75195322a8d2/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.943747 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d6758dbc9-fppqt_3225f7bc-88d9-4d11-a415-e6a421573849/neutron-api/0.log" Sep 29 20:07:52 crc kubenswrapper[4779]: I0929 20:07:52.967869 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d6758dbc9-fppqt_3225f7bc-88d9-4d11-a415-e6a421573849/neutron-httpd/0.log" Sep 29 20:07:53 crc kubenswrapper[4779]: I0929 20:07:53.166207 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j_b17f52c4-7329-4262-87d2-d5ef94e88f28/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:53 crc 
kubenswrapper[4779]: I0929 20:07:53.740197 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5d230bec-c78b-45a3-b334-7353e1a8b827/nova-api-log/0.log" Sep 29 20:07:53 crc kubenswrapper[4779]: I0929 20:07:53.909439 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7b53aa08-b1e7-4e69-86f3-830bb5c84002/nova-cell0-conductor-conductor/0.log" Sep 29 20:07:53 crc kubenswrapper[4779]: I0929 20:07:53.919979 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5d230bec-c78b-45a3-b334-7353e1a8b827/nova-api-api/0.log" Sep 29 20:07:54 crc kubenswrapper[4779]: I0929 20:07:54.332488 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_87caa150-0994-48a8-816a-b6dccebf4092/nova-cell1-conductor-conductor/0.log" Sep 29 20:07:54 crc kubenswrapper[4779]: I0929 20:07:54.344168 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_d6e31019-bd61-45e8-9380-b973dcbe4873/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 20:07:54 crc kubenswrapper[4779]: I0929 20:07:54.687006 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-h4cr7_06ca3195-0d79-4376-9627-6075a8cdf09c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:54 crc kubenswrapper[4779]: I0929 20:07:54.738961 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f5393b3-8bf7-4578-98c9-2323a8ec7ea6/nova-metadata-log/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.114304 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_26aa6a8b-e675-4b3e-aa17-9ed17b49c907/nova-scheduler-scheduler/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.299618 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/mysql-bootstrap/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.507175 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/mysql-bootstrap/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.526822 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/galera/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.752791 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/mysql-bootstrap/0.log" Sep 29 20:07:55 crc kubenswrapper[4779]: I0929 20:07:55.992985 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/mysql-bootstrap/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.021289 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f5393b3-8bf7-4578-98c9-2323a8ec7ea6/nova-metadata-metadata/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.043537 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/galera/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.232715 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_58f628df-8d11-4663-b84b-0c810edaa5fb/openstackclient/0.log" Sep 29 20:07:56 crc 
kubenswrapper[4779]: I0929 20:07:56.467409 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ds8vm_e1ff3819-9cf2-492f-a447-94a898e0a54d/openstack-network-exporter/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.556189 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server-init/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.772480 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovs-vswitchd/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.815490 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server-init/0.log" Sep 29 20:07:56 crc kubenswrapper[4779]: I0929 20:07:56.822761 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.051120 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rwqzd_9771c712-00ce-4dcf-ab04-7b6893c8725c/ovn-controller/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.231085 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-626r9_0b9b5a28-3a66-4041-a143-8c8a40b27ef4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.393212 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_210875cc-31fa-480b-bd01-a042fc73dcd6/openstack-network-exporter/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.487794 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_210875cc-31fa-480b-bd01-a042fc73dcd6/ovn-northd/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.580602 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79136f36-d427-488e-81e2-ef55c73ee91a/openstack-network-exporter/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.685983 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79136f36-d427-488e-81e2-ef55c73ee91a/ovsdbserver-nb/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.822049 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bbcdb00b-849b-40c6-9e53-2c751dbd11dd/openstack-network-exporter/0.log" Sep 29 20:07:57 crc kubenswrapper[4779]: I0929 20:07:57.893035 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bbcdb00b-849b-40c6-9e53-2c751dbd11dd/ovsdbserver-sb/0.log" Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.093961 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7846dbb58d-4ftsw_7b7c84ea-91fb-467d-b935-6f5034a2d7fb/placement-api/0.log" Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.233380 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7846dbb58d-4ftsw_7b7c84ea-91fb-467d-b935-6f5034a2d7fb/placement-log/0.log" Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.288288 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/setup-container/0.log" 
Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.555275 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/setup-container/0.log"
Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.635759 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/rabbitmq/0.log"
Sep 29 20:07:58 crc kubenswrapper[4779]: I0929 20:07:58.778543 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/setup-container/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.062668 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/setup-container/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.072271 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/rabbitmq/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.195890 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-9k524_501fc953-417c-4abf-aafc-9cc25c3ecb23/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.283178 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-dht8n_630eccd4-a2c9-4003-b315-2d8d18ebeeba/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.479621 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8_76990f22-5c56-482e-a5a6-6d3c74bba7cd/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.665349 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6j4j9_aceb0e20-3731-4018-947e-40f2193b8c0a/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.730576 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-nrsx2_a075ace8-eeae-4fa5-9353-72e217e82dfd/ssh-known-hosts-edpm-deployment/0.log"
Sep 29 20:07:59 crc kubenswrapper[4779]: I0929 20:07:59.959076 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-76666bfbfc-dj7qj_1dd6be85-ce64-429a-9197-23450db2e2ad/proxy-server/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.064183 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-76666bfbfc-dj7qj_1dd6be85-ce64-429a-9197-23450db2e2ad/proxy-httpd/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.192943 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-wzdw7_587857be-cc5b-43cb-bf66-d9e7aadcc587/swift-ring-rebalance/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.348017 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-auditor/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.386482 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-reaper/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.564136 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-replicator/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.565720 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-auditor/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.568097 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-server/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.778256 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-server/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.798070 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-replicator/0.log"
Sep 29 20:08:00 crc kubenswrapper[4779]: I0929 20:08:00.856695 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-updater/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.040312 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-expirer/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.066466 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-auditor/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.081852 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-replicator/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.243866 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-server/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.265675 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-updater/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.301642 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/rsync/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.435656 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/swift-recon-cron/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.548989 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vf74d_a8bc7976-d585-4a94-b925-870996cc4ae3/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.777750 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_8bf09edd-ad1f-4883-ade0-8082b2055f60/tempest-tests-tempest-tests-runner/0.log"
Sep 29 20:08:01 crc kubenswrapper[4779]: I0929 20:08:01.899696 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_ab44e769-9e8e-4cec-8fa4-97f93b25dd8c/test-operator-logs-container/0.log"
Sep 29 20:08:02 crc kubenswrapper[4779]: I0929 20:08:02.086034 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt_2ef530d3-702c-44e3-a066-85a59398fafc/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 20:08:10 crc kubenswrapper[4779]: I0929 20:08:10.033284 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_8de5fc49-d446-41aa-aa5e-d32fd04a281e/memcached/0.log"
Sep 29 20:08:13 crc kubenswrapper[4779]: I0929 20:08:13.785302 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 20:08:13 crc kubenswrapper[4779]: I0929 20:08:13.785588 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 20:08:43 crc kubenswrapper[4779]: I0929 20:08:43.785412 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 20:08:43 crc kubenswrapper[4779]: I0929 20:08:43.786345 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 20:08:43 crc kubenswrapper[4779]: I0929 20:08:43.787159 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr"
Sep 29 20:08:43 crc kubenswrapper[4779]: I0929 20:08:43.788278 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 20:08:43 crc kubenswrapper[4779]: I0929 20:08:43.788454 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23" gracePeriod=600
Sep 29 20:08:44 crc kubenswrapper[4779]: I0929 20:08:44.139404 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23" exitCode=0
Sep 29 20:08:44 crc kubenswrapper[4779]: I0929 20:08:44.139566 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23"}
Sep 29 20:08:44 crc kubenswrapper[4779]: I0929 20:08:44.139839 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619"}
Sep 29 20:08:44 crc kubenswrapper[4779]: I0929 20:08:44.139877 4779 scope.go:117] "RemoveContainer" containerID="6d7a711d10d7b815d17c868c16f7d944cff261ad893818784ad5bfb184ae219a"
Sep 29 20:08:51 crc kubenswrapper[4779]: I0929 20:08:51.237608 4779 generic.go:334] "Generic (PLEG): container finished" podID="3e68ecff-bd00-4be9-a41b-ef164bf13041" containerID="2b1b5e83d4e4b3f186a56de03b4f18e1eeee2f49cc9fb0ca8a29c7efd2a66a7c" exitCode=0
Sep 29 20:08:51 crc kubenswrapper[4779]: I0929 20:08:51.237699 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-k6p64" event={"ID":"3e68ecff-bd00-4be9-a41b-ef164bf13041","Type":"ContainerDied","Data":"2b1b5e83d4e4b3f186a56de03b4f18e1eeee2f49cc9fb0ca8a29c7efd2a66a7c"}
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.348244 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-k6p64"
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.383259 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-k6p64"]
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.389676 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-k6p64"]
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.523059 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb88f\" (UniqueName: \"kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f\") pod \"3e68ecff-bd00-4be9-a41b-ef164bf13041\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") "
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.523230 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host\") pod \"3e68ecff-bd00-4be9-a41b-ef164bf13041\" (UID: \"3e68ecff-bd00-4be9-a41b-ef164bf13041\") "
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.523362 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host" (OuterVolumeSpecName: "host") pod "3e68ecff-bd00-4be9-a41b-ef164bf13041" (UID: "3e68ecff-bd00-4be9-a41b-ef164bf13041"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.524260 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e68ecff-bd00-4be9-a41b-ef164bf13041-host\") on node \"crc\" DevicePath \"\""
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.532537 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f" (OuterVolumeSpecName: "kube-api-access-kb88f") pod "3e68ecff-bd00-4be9-a41b-ef164bf13041" (UID: "3e68ecff-bd00-4be9-a41b-ef164bf13041"). InnerVolumeSpecName "kube-api-access-kb88f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:08:52 crc kubenswrapper[4779]: I0929 20:08:52.625920 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb88f\" (UniqueName: \"kubernetes.io/projected/3e68ecff-bd00-4be9-a41b-ef164bf13041-kube-api-access-kb88f\") on node \"crc\" DevicePath \"\""
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.259071 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f0d5b48df8ee5bbb6c9d8b6ebf872c9b265e7457888c27eea156e052dc6f522"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.259312 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-k6p64"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.573721 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-64tv8"]
Sep 29 20:08:53 crc kubenswrapper[4779]: E0929 20:08:53.574199 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e68ecff-bd00-4be9-a41b-ef164bf13041" containerName="container-00"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.574219 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e68ecff-bd00-4be9-a41b-ef164bf13041" containerName="container-00"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.574496 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e68ecff-bd00-4be9-a41b-ef164bf13041" containerName="container-00"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.575282 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.650727 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zd7m\" (UniqueName: \"kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.650781 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.752231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zd7m\" (UniqueName: \"kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.752283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.752448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.777254 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zd7m\" (UniqueName: \"kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m\") pod \"crc-debug-64tv8\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") " pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.786461 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e68ecff-bd00-4be9-a41b-ef164bf13041" path="/var/lib/kubelet/pods/3e68ecff-bd00-4be9-a41b-ef164bf13041/volumes"
Sep 29 20:08:53 crc kubenswrapper[4779]: I0929 20:08:53.894427 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:54 crc kubenswrapper[4779]: I0929 20:08:54.268930 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-64tv8" event={"ID":"e8c24243-d603-4a17-a675-8bf1c2af1a93","Type":"ContainerStarted","Data":"c9c67766d333b412557235c05f208d0ebd2a98c525f7d01f62bfbccae78dba4c"}
Sep 29 20:08:54 crc kubenswrapper[4779]: I0929 20:08:54.269333 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-64tv8" event={"ID":"e8c24243-d603-4a17-a675-8bf1c2af1a93","Type":"ContainerStarted","Data":"b8b7bfda80142ba9b0f9b8e0c66342bb0daaa441c8628dd097e450e5afc93934"}
Sep 29 20:08:54 crc kubenswrapper[4779]: I0929 20:08:54.290282 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jnvd9/crc-debug-64tv8" podStartSLOduration=1.290262513 podStartE2EDuration="1.290262513s" podCreationTimestamp="2025-09-29 20:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:08:54.280721913 +0000 UTC m=+3645.165147043" watchObservedRunningTime="2025-09-29 20:08:54.290262513 +0000 UTC m=+3645.174687623"
Sep 29 20:08:55 crc kubenswrapper[4779]: I0929 20:08:55.278939 4779 generic.go:334] "Generic (PLEG): container finished" podID="e8c24243-d603-4a17-a675-8bf1c2af1a93" containerID="c9c67766d333b412557235c05f208d0ebd2a98c525f7d01f62bfbccae78dba4c" exitCode=0
Sep 29 20:08:55 crc kubenswrapper[4779]: I0929 20:08:55.279036 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-64tv8" event={"ID":"e8c24243-d603-4a17-a675-8bf1c2af1a93","Type":"ContainerDied","Data":"c9c67766d333b412557235c05f208d0ebd2a98c525f7d01f62bfbccae78dba4c"}
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.374616 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.393710 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host\") pod \"e8c24243-d603-4a17-a675-8bf1c2af1a93\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") "
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.393837 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zd7m\" (UniqueName: \"kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m\") pod \"e8c24243-d603-4a17-a675-8bf1c2af1a93\" (UID: \"e8c24243-d603-4a17-a675-8bf1c2af1a93\") "
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.394099 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host" (OuterVolumeSpecName: "host") pod "e8c24243-d603-4a17-a675-8bf1c2af1a93" (UID: "e8c24243-d603-4a17-a675-8bf1c2af1a93"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.394209 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8c24243-d603-4a17-a675-8bf1c2af1a93-host\") on node \"crc\" DevicePath \"\""
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.400292 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m" (OuterVolumeSpecName: "kube-api-access-6zd7m") pod "e8c24243-d603-4a17-a675-8bf1c2af1a93" (UID: "e8c24243-d603-4a17-a675-8bf1c2af1a93"). InnerVolumeSpecName "kube-api-access-6zd7m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:08:56 crc kubenswrapper[4779]: I0929 20:08:56.495677 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zd7m\" (UniqueName: \"kubernetes.io/projected/e8c24243-d603-4a17-a675-8bf1c2af1a93-kube-api-access-6zd7m\") on node \"crc\" DevicePath \"\""
Sep 29 20:08:57 crc kubenswrapper[4779]: I0929 20:08:57.295438 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-64tv8" event={"ID":"e8c24243-d603-4a17-a675-8bf1c2af1a93","Type":"ContainerDied","Data":"b8b7bfda80142ba9b0f9b8e0c66342bb0daaa441c8628dd097e450e5afc93934"}
Sep 29 20:08:57 crc kubenswrapper[4779]: I0929 20:08:57.295764 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8b7bfda80142ba9b0f9b8e0c66342bb0daaa441c8628dd097e450e5afc93934"
Sep 29 20:08:57 crc kubenswrapper[4779]: I0929 20:08:57.295531 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-64tv8"
Sep 29 20:09:00 crc kubenswrapper[4779]: I0929 20:09:00.782574 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-64tv8"]
Sep 29 20:09:00 crc kubenswrapper[4779]: I0929 20:09:00.789667 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-64tv8"]
Sep 29 20:09:01 crc kubenswrapper[4779]: I0929 20:09:01.781014 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8c24243-d603-4a17-a675-8bf1c2af1a93" path="/var/lib/kubelet/pods/e8c24243-d603-4a17-a675-8bf1c2af1a93/volumes"
Sep 29 20:09:01 crc kubenswrapper[4779]: I0929 20:09:01.988119 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-d8m87"]
Sep 29 20:09:01 crc kubenswrapper[4779]: E0929 20:09:01.988935 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c24243-d603-4a17-a675-8bf1c2af1a93" containerName="container-00"
Sep 29 20:09:01 crc kubenswrapper[4779]: I0929 20:09:01.988951 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c24243-d603-4a17-a675-8bf1c2af1a93" containerName="container-00"
Sep 29 20:09:01 crc kubenswrapper[4779]: I0929 20:09:01.989164 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c24243-d603-4a17-a675-8bf1c2af1a93" containerName="container-00"
Sep 29 20:09:01 crc kubenswrapper[4779]: I0929 20:09:01.989994 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.091347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmt7t\" (UniqueName: \"kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.091526 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.193232 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmt7t\" (UniqueName: \"kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.193358 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.193469 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.222594 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmt7t\" (UniqueName: \"kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t\") pod \"crc-debug-d8m87\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") " pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: I0929 20:09:02.321078 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:02 crc kubenswrapper[4779]: W0929 20:09:02.344253 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85864f53_0af4_4656_b07e_ec58ad0c9d1f.slice/crio-8b47d97fdf1c2c74cf2cac6bdaff8450546115733f89bb443e13acf9f34b11c2 WatchSource:0}: Error finding container 8b47d97fdf1c2c74cf2cac6bdaff8450546115733f89bb443e13acf9f34b11c2: Status 404 returned error can't find the container with id 8b47d97fdf1c2c74cf2cac6bdaff8450546115733f89bb443e13acf9f34b11c2
Sep 29 20:09:03 crc kubenswrapper[4779]: I0929 20:09:03.348310 4779 generic.go:334] "Generic (PLEG): container finished" podID="85864f53-0af4-4656-b07e-ec58ad0c9d1f" containerID="28537167ab4740c71754b35b103ea7f76b5b45eae3358639ecf916e7cc59a63f" exitCode=0
Sep 29 20:09:03 crc kubenswrapper[4779]: I0929 20:09:03.348432 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-d8m87" event={"ID":"85864f53-0af4-4656-b07e-ec58ad0c9d1f","Type":"ContainerDied","Data":"28537167ab4740c71754b35b103ea7f76b5b45eae3358639ecf916e7cc59a63f"}
Sep 29 20:09:03 crc kubenswrapper[4779]: I0929 20:09:03.348704 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/crc-debug-d8m87" event={"ID":"85864f53-0af4-4656-b07e-ec58ad0c9d1f","Type":"ContainerStarted","Data":"8b47d97fdf1c2c74cf2cac6bdaff8450546115733f89bb443e13acf9f34b11c2"}
Sep 29 20:09:03 crc kubenswrapper[4779]: I0929 20:09:03.397304 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-d8m87"]
Sep 29 20:09:03 crc kubenswrapper[4779]: I0929 20:09:03.414904 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jnvd9/crc-debug-d8m87"]
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.463482 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.635067 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host\") pod \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") "
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.635235 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host" (OuterVolumeSpecName: "host") pod "85864f53-0af4-4656-b07e-ec58ad0c9d1f" (UID: "85864f53-0af4-4656-b07e-ec58ad0c9d1f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.635286 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmt7t\" (UniqueName: \"kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t\") pod \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\" (UID: \"85864f53-0af4-4656-b07e-ec58ad0c9d1f\") "
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.635707 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85864f53-0af4-4656-b07e-ec58ad0c9d1f-host\") on node \"crc\" DevicePath \"\""
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.641613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t" (OuterVolumeSpecName: "kube-api-access-nmt7t") pod "85864f53-0af4-4656-b07e-ec58ad0c9d1f" (UID: "85864f53-0af4-4656-b07e-ec58ad0c9d1f"). InnerVolumeSpecName "kube-api-access-nmt7t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.737770 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmt7t\" (UniqueName: \"kubernetes.io/projected/85864f53-0af4-4656-b07e-ec58ad0c9d1f-kube-api-access-nmt7t\") on node \"crc\" DevicePath \"\""
Sep 29 20:09:04 crc kubenswrapper[4779]: I0929 20:09:04.923857 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.163334 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.200063 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.224193 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.366199 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.367138 4779 scope.go:117] "RemoveContainer" containerID="28537167ab4740c71754b35b103ea7f76b5b45eae3358639ecf916e7cc59a63f"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.367382 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jnvd9/crc-debug-d8m87"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.419420 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.464841 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/extract/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.571568 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-l2cw6_7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997/kube-rbac-proxy/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.690418 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-txbkn_5d20194a-c49a-4da1-a081-23d5c3bde845/kube-rbac-proxy/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.695843 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-l2cw6_7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997/manager/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.777095 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85864f53-0af4-4656-b07e-ec58ad0c9d1f" path="/var/lib/kubelet/pods/85864f53-0af4-4656-b07e-ec58ad0c9d1f/volumes"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.786728 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-txbkn_5d20194a-c49a-4da1-a081-23d5c3bde845/manager/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.879777 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kmgvf_0e2748e0-ee66-45a1-b018-0798ad0ef293/manager/0.log"
Sep 29 20:09:05 crc kubenswrapper[4779]: I0929 20:09:05.881198 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kmgvf_0e2748e0-ee66-45a1-b018-0798ad0ef293/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.004921 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9xr2s_68c5f3eb-52f4-4ede-ac89-f3a9aafe421b/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.117984 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9xr2s_68c5f3eb-52f4-4ede-ac89-f3a9aafe421b/manager/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.169933 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fql2p_df41de35-4c6d-4313-8ccb-19dcead38269/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.208814 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fql2p_df41de35-4c6d-4313-8ccb-19dcead38269/manager/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.323967 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-8ndmd_0b008477-9497-4cb1-9b44-c8c0dacbd0ae/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.386983 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-8ndmd_0b008477-9497-4cb1-9b44-c8c0dacbd0ae/manager/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.516167 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7pf7d_50a9326b-f577-4994-ba3a-28f1ffb1df6c/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.681826 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7pf7d_50a9326b-f577-4994-ba3a-28f1ffb1df6c/manager/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.690700 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-46trm_36748ad5-2673-4d95-ada2-7ff95f740fa9/kube-rbac-proxy/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.722413 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-46trm_36748ad5-2673-4d95-ada2-7ff95f740fa9/manager/0.log"
Sep 29 20:09:06 crc kubenswrapper[4779]: I0929 20:09:06.927229 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-svxsl_65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.014544 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-svxsl_65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.090794 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-6x9z9_15635458-2ece-4c4b-a011-1c82d097bfdf/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.118739 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-6x9z9_15635458-2ece-4c4b-a011-1c82d097bfdf/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.251021 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-k26dh_833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.314142 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-k26dh_833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.390952 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-kvpnd_9ae9e131-70db-4bd1-8347-c5714c2b4754/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.489129 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-kvpnd_9ae9e131-70db-4bd1-8347-c5714c2b4754/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.576271 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-94ldb_9449cbcb-f74f-473e-9c0d-f1737b39c383/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.710425 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-94ldb_9449cbcb-f74f-473e-9c0d-f1737b39c383/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.830241 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-mtqwc_1c3a147f-0c72-4889-80aa-8b53a0c9ea3f/kube-rbac-proxy/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.857292 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-mtqwc_1c3a147f-0c72-4889-80aa-8b53a0c9ea3f/manager/0.log"
Sep 29 20:09:07 crc kubenswrapper[4779]: I0929 20:09:07.976693 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-54f6z_49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4/kube-rbac-proxy/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.058398 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-54f6z_49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4/manager/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.111354 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f846cdb6-8tk8p_3c04c1cb-ecb2-42a0-82e6-3c2842508041/kube-rbac-proxy/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.372510 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764cfd59bc-2w6ls_8988ff92-ee96-4702-875b-a311c8d08a7b/kube-rbac-proxy/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.504405 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764cfd59bc-2w6ls_8988ff92-ee96-4702-875b-a311c8d08a7b/operator/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.546402 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pwzpf_96bf8594-ea42-4a66-baa7-39679ec2aa5d/registry-server/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.680217 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-f7xwn_63554382-d024-4d43-b5c5-b31b80d47749/kube-rbac-proxy/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.843794 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-f7xwn_63554382-d024-4d43-b5c5-b31b80d47749/manager/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.849915 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-8fjvn_dfc872d3-d6c0-42af-9ab7-7695257d969f/kube-rbac-proxy/0.log"
Sep 29 20:09:08 crc kubenswrapper[4779]: I0929 20:09:08.954288 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-8fjvn_dfc872d3-d6c0-42af-9ab7-7695257d969f/manager/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.100630 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-wl7d2_5c9afdac-c252-4cd4-afb1-9d7fb43d86e1/operator/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.230532 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-9jgwn_38ef5cba-94db-4e0d-b2ad-290293848c65/kube-rbac-proxy/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.274946 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f846cdb6-8tk8p_3c04c1cb-ecb2-42a0-82e6-3c2842508041/manager/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.349406 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-tflz6_cdc62734-f794-43fc-9af8-752098cdf316/kube-rbac-proxy/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.354509 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-9jgwn_38ef5cba-94db-4e0d-b2ad-290293848c65/manager/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.492154 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-tflz6_cdc62734-f794-43fc-9af8-752098cdf316/manager/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.537923 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-dlgq2_13740318-83f1-4384-9b4c-b8de793773d3/manager/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.557173 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-dlgq2_13740318-83f1-4384-9b4c-b8de793773d3/kube-rbac-proxy/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.650930 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-crptq_a15b1202-c010-40ae-be51-75fbb766fba0/kube-rbac-proxy/0.log"
Sep 29 20:09:09 crc kubenswrapper[4779]: I0929 20:09:09.674910 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-crptq_a15b1202-c010-40ae-be51-75fbb766fba0/manager/0.log"
Sep 29 20:09:25 crc kubenswrapper[4779]: I0929 20:09:25.115157 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fcw8k_e9fff169-c8a1-4062-9a2a-ab4c1e790c07/control-plane-machine-set-operator/0.log"
Sep 29 20:09:25 crc kubenswrapper[4779]: I0929 20:09:25.252799 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-66qtp_0ab1562e-a39e-4ddf-95ee-cf6ff520883d/kube-rbac-proxy/0.log"
Sep 29 20:09:25 crc kubenswrapper[4779]: I0929 20:09:25.299856 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-66qtp_0ab1562e-a39e-4ddf-95ee-cf6ff520883d/machine-api-operator/0.log"
Sep 29 20:09:37 crc kubenswrapper[4779]: I0929 20:09:37.736396 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-g5jq2_599cf28e-c7ed-4c1a-a84d-ae90ec0708ba/cert-manager-controller/0.log"
Sep 29 20:09:37 crc kubenswrapper[4779]: I0929 20:09:37.889555 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zpt2c_d0e20784-de41-4e9c-8c95-f047e75f30fd/cert-manager-cainjector/0.log"
Sep 29 20:09:37 crc kubenswrapper[4779]: I0929 20:09:37.945810 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-bf8nn_3566f49d-2c94-40d8-b5b1-aa51cc7c043b/cert-manager-webhook/0.log"
Sep 29 20:09:49 crc kubenswrapper[4779]: I0929 20:09:49.729306 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-nvgwc_6c719c4d-6f34-4427-8c72-69a5c0efe754/nmstate-console-plugin/0.log"
Sep 29 20:09:49 crc kubenswrapper[4779]: I0929 20:09:49.838211 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-v9vn6_c74d7452-c58a-4336-9acf-acc9190816a9/nmstate-handler/0.log"
Sep 29 20:09:49 crc kubenswrapper[4779]: I0929 20:09:49.926681 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-zwk4x_c0c8d102-b0f5-4ebd-ad41-3359fd330e5c/kube-rbac-proxy/0.log"
Sep 29 20:09:49 crc kubenswrapper[4779]: I0929 20:09:49.952211 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-zwk4x_c0c8d102-b0f5-4ebd-ad41-3359fd330e5c/nmstate-metrics/0.log"
Sep 29 20:09:50 crc kubenswrapper[4779]: I0929 20:09:50.066444 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-98lp2_5df72cf6-fbb4-4160-afad-5fb056e747ed/nmstate-operator/0.log"
Sep 29 20:09:50 crc kubenswrapper[4779]: I0929 20:09:50.167427 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-qhlnm_ef026a49-4282-45ef-b535-288ac25fe011/nmstate-webhook/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.420170 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l5gr5_127fd6d6-e32b-4152-9f62-23b6b051318d/kube-rbac-proxy/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.498948 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l5gr5_127fd6d6-e32b-4152-9f62-23b6b051318d/controller/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.616391 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.802802 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.813134 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.828234 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log"
Sep 29 20:10:03 crc kubenswrapper[4779]: I0929 20:10:03.854309 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.027587 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.031587 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.058820 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.129641 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.251813 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.259118 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.271178 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.339716 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/controller/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.451690 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/frr-metrics/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.478192 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/kube-rbac-proxy/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.552535 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/kube-rbac-proxy-frr/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.666486 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/reloader/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.699667 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-6f4p6_09b83c31-96ba-457f-9385-7a124ddbc54d/frr-k8s-webhook-server/0.log"
Sep 29 20:10:04 crc kubenswrapper[4779]: I0929 20:10:04.919799 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6ccfd99bc8-cd86r_bc87a3b2-72fa-4bca-9172-47b799399c7b/manager/0.log"
Sep 29 20:10:05 crc kubenswrapper[4779]: I0929 20:10:05.056424 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-8764cbcb6-t7df9_3d88a574-ee3d-4b67-80c7-cb9ab603edfd/webhook-server/0.log"
Sep 29 20:10:05 crc kubenswrapper[4779]: I0929 20:10:05.183512 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2v7nb_b16e724b-3b5e-46e6-bb8a-1aebd631d549/kube-rbac-proxy/0.log"
Sep 29 20:10:05 crc kubenswrapper[4779]: I0929 20:10:05.645558 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2v7nb_b16e724b-3b5e-46e6-bb8a-1aebd631d549/speaker/0.log"
Sep 29 20:10:05 crc kubenswrapper[4779]: I0929 20:10:05.750646 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/frr/0.log"
Sep 29 20:10:17 crc kubenswrapper[4779]: I0929 20:10:17.568122 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log"
Sep 29 20:10:17 crc kubenswrapper[4779]: I0929 20:10:17.810680 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log"
Sep 29 20:10:17 crc kubenswrapper[4779]: I0929 20:10:17.849973 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log"
Sep 29 20:10:17 crc kubenswrapper[4779]: I0929 20:10:17.871387 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.020187 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.026867 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.054478 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/extract/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.200812 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.333580 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.364879 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.399206 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.566906 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.580081 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.816161 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log"
Sep 29 20:10:18 crc kubenswrapper[4779]: I0929 20:10:18.962204 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.000654 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.034163 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/registry-server/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.048092 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.204567 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.212859 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.413872 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.613663 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.706737 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/registry-server/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.716787 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.737604 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.837538 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.877767 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log"
Sep 29 20:10:19 crc kubenswrapper[4779]: I0929 20:10:19.884584 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/extract/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.061951 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vcrlk_378c23da-08aa-4f09-9171-29a4f81908bb/marketplace-operator/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.098312 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.250284 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.279025 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.287623 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.488132 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.503231 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.568774 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/registry-server/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.671211 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.807695 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.815809 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log"
Sep 29 20:10:20 crc kubenswrapper[4779]: I0929 20:10:20.885050 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log"
Sep 29 20:10:21 crc kubenswrapper[4779]: I0929 20:10:21.020035 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log"
Sep 29 20:10:21 crc kubenswrapper[4779]: I0929 20:10:21.023410 4779 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log" Sep 29 20:10:21 crc kubenswrapper[4779]: I0929 20:10:21.427222 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/registry-server/0.log" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.426648 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:45 crc kubenswrapper[4779]: E0929 20:10:45.427677 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85864f53-0af4-4656-b07e-ec58ad0c9d1f" containerName="container-00" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.427695 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="85864f53-0af4-4656-b07e-ec58ad0c9d1f" containerName="container-00" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.427945 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="85864f53-0af4-4656-b07e-ec58ad0c9d1f" containerName="container-00" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.429716 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.442073 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.517950 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.518071 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.518331 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g68q\" (UniqueName: \"kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.619990 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.620098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g68q\" (UniqueName: \"kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " 
pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.620201 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.620718 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.620809 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.640406 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g68q\" (UniqueName: \"kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q\") pod \"community-operators-js5nd\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:45 crc kubenswrapper[4779]: I0929 20:10:45.753724 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:46 crc kubenswrapper[4779]: I0929 20:10:46.360763 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:46 crc kubenswrapper[4779]: I0929 20:10:46.393613 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerStarted","Data":"f5948b2fec5997c399e058556f8e071a3f731a609df86fe45352eda01f98ec35"} Sep 29 20:10:47 crc kubenswrapper[4779]: I0929 20:10:47.403615 4779 generic.go:334] "Generic (PLEG): container finished" podID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerID="14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced" exitCode=0 Sep 29 20:10:47 crc kubenswrapper[4779]: I0929 20:10:47.403796 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerDied","Data":"14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced"} Sep 29 20:10:47 crc kubenswrapper[4779]: I0929 20:10:47.405725 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:10:49 crc kubenswrapper[4779]: I0929 20:10:49.421607 4779 generic.go:334] "Generic (PLEG): container finished" podID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerID="d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682" exitCode=0 Sep 29 20:10:49 crc kubenswrapper[4779]: I0929 20:10:49.421693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" 
event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerDied","Data":"d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682"} Sep 29 20:10:50 crc kubenswrapper[4779]: I0929 20:10:50.430744 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerStarted","Data":"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1"} Sep 29 20:10:50 crc kubenswrapper[4779]: I0929 20:10:50.451915 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-js5nd" podStartSLOduration=3.0633404889999998 podStartE2EDuration="5.451898878s" podCreationTimestamp="2025-09-29 20:10:45 +0000 UTC" firstStartedPulling="2025-09-29 20:10:47.405505051 +0000 UTC m=+3758.289930151" lastFinishedPulling="2025-09-29 20:10:49.79406344 +0000 UTC m=+3760.678488540" observedRunningTime="2025-09-29 20:10:50.446922432 +0000 UTC m=+3761.331347522" watchObservedRunningTime="2025-09-29 20:10:50.451898878 +0000 UTC m=+3761.336323978" Sep 29 20:10:55 crc kubenswrapper[4779]: I0929 20:10:55.754115 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:55 crc kubenswrapper[4779]: I0929 20:10:55.754736 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:55 crc kubenswrapper[4779]: I0929 20:10:55.815443 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:56 crc kubenswrapper[4779]: I0929 20:10:56.556178 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:56 crc kubenswrapper[4779]: I0929 20:10:56.605139 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:58 crc kubenswrapper[4779]: I0929 20:10:58.490623 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-js5nd" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="registry-server" containerID="cri-o://b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1" gracePeriod=2 Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.027901 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.155791 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g68q\" (UniqueName: \"kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q\") pod \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.155855 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content\") pod \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.156012 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities\") pod \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\" (UID: \"b05e6ae1-9c72-4dba-9443-05aa6cbf5992\") " Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.157535 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities" (OuterVolumeSpecName: "utilities") pod "b05e6ae1-9c72-4dba-9443-05aa6cbf5992" (UID: "b05e6ae1-9c72-4dba-9443-05aa6cbf5992"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.163011 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q" (OuterVolumeSpecName: "kube-api-access-7g68q") pod "b05e6ae1-9c72-4dba-9443-05aa6cbf5992" (UID: "b05e6ae1-9c72-4dba-9443-05aa6cbf5992"). InnerVolumeSpecName "kube-api-access-7g68q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.258657 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.258689 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g68q\" (UniqueName: \"kubernetes.io/projected/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-kube-api-access-7g68q\") on node \"crc\" DevicePath \"\"" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.432879 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b05e6ae1-9c72-4dba-9443-05aa6cbf5992" (UID: "b05e6ae1-9c72-4dba-9443-05aa6cbf5992"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.462615 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b05e6ae1-9c72-4dba-9443-05aa6cbf5992-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.501702 4779 generic.go:334] "Generic (PLEG): container finished" podID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerID="b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1" exitCode=0 Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.501787 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-js5nd" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.501781 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerDied","Data":"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1"} Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.501993 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-js5nd" event={"ID":"b05e6ae1-9c72-4dba-9443-05aa6cbf5992","Type":"ContainerDied","Data":"f5948b2fec5997c399e058556f8e071a3f731a609df86fe45352eda01f98ec35"} Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.502045 4779 scope.go:117] "RemoveContainer" containerID="b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.551231 4779 scope.go:117] "RemoveContainer" containerID="d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.557059 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.566690 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-js5nd"] Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.585086 4779 scope.go:117] "RemoveContainer" containerID="14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.623816 4779 scope.go:117] "RemoveContainer" containerID="b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1" Sep 29 20:10:59 crc kubenswrapper[4779]: E0929 20:10:59.624187 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1\": container with ID starting with b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1 not found: ID does not exist" containerID="b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.624219 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1"} err="failed to get container status \"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1\": rpc error: code = NotFound desc = could not find container \"b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1\": container with ID starting with b7ccc568f307dcbc1aeedd796185c3f44db6a1562d5a979d3471cee0f8c27cd1 not found: ID does not exist" Sep 29 
20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.624239 4779 scope.go:117] "RemoveContainer" containerID="d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682" Sep 29 20:10:59 crc kubenswrapper[4779]: E0929 20:10:59.624511 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682\": container with ID starting with d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682 not found: ID does not exist" containerID="d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.624553 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682"} err="failed to get container status \"d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682\": rpc error: code = NotFound desc = could not find container \"d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682\": container with ID starting with d416da8572f52fb1ef6478c2ba6582be3a14a9a496ff6454a5f1c0c80bf38682 not found: ID does not exist" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.624580 4779 scope.go:117] "RemoveContainer" containerID="14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced" Sep 29 20:10:59 crc kubenswrapper[4779]: E0929 20:10:59.624992 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced\": container with ID starting with 14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced not found: ID does not exist" containerID="14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.625021 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced"} err="failed to get container status \"14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced\": rpc error: code = NotFound desc = could not find container \"14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced\": container with ID starting with 14dd29dfabcf1b304beb3f394c25e9b90e68b5a73eec53d337ae1e4776e48ced not found: ID does not exist" Sep 29 20:10:59 crc kubenswrapper[4779]: I0929 20:10:59.779702 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" path="/var/lib/kubelet/pods/b05e6ae1-9c72-4dba-9443-05aa6cbf5992/volumes" Sep 29 20:11:13 crc kubenswrapper[4779]: I0929 20:11:13.784725 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:11:13 crc kubenswrapper[4779]: I0929 20:11:13.786616 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:11:43 crc kubenswrapper[4779]: I0929 20:11:43.785637 4779 patch_prober.go:28] 
interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:11:43 crc kubenswrapper[4779]: I0929 20:11:43.786229 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:12:13 crc kubenswrapper[4779]: I0929 20:12:13.785349 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:12:13 crc kubenswrapper[4779]: I0929 20:12:13.785957 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:12:13 crc kubenswrapper[4779]: I0929 20:12:13.786015 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 20:12:13 crc kubenswrapper[4779]: I0929 20:12:13.787437 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:12:13 crc kubenswrapper[4779]: I0929 20:12:13.787527 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" gracePeriod=600 Sep 29 20:12:13 crc kubenswrapper[4779]: E0929 20:12:13.916703 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:12:14 crc kubenswrapper[4779]: I0929 20:12:14.364608 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" exitCode=0 Sep 29 20:12:14 crc kubenswrapper[4779]: I0929 20:12:14.364650 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619"} Sep 29 20:12:14 crc 
kubenswrapper[4779]: I0929 20:12:14.364683 4779 scope.go:117] "RemoveContainer" containerID="536cbc0e9dbc43b9fe51d71f773a7cc665ddb379a7083cffc33d4d69cea1ca23" Sep 29 20:12:14 crc kubenswrapper[4779]: I0929 20:12:14.365847 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:12:14 crc kubenswrapper[4779]: E0929 20:12:14.366637 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:12:15 crc kubenswrapper[4779]: I0929 20:12:15.383603 4779 generic.go:334] "Generic (PLEG): container finished" podID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerID="111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f" exitCode=0 Sep 29 20:12:15 crc kubenswrapper[4779]: I0929 20:12:15.383672 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jnvd9/must-gather-44n2z" event={"ID":"4636abcd-6c3b-451c-be55-f51d93252d3f","Type":"ContainerDied","Data":"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f"} Sep 29 20:12:15 crc kubenswrapper[4779]: I0929 20:12:15.384602 4779 scope.go:117] "RemoveContainer" containerID="111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f" Sep 29 20:12:16 crc kubenswrapper[4779]: I0929 20:12:16.105638 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jnvd9_must-gather-44n2z_4636abcd-6c3b-451c-be55-f51d93252d3f/gather/0.log" Sep 29 20:12:24 crc kubenswrapper[4779]: I0929 20:12:24.475108 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jnvd9/must-gather-44n2z"] Sep 29 20:12:24 crc kubenswrapper[4779]: I0929 20:12:24.476056 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jnvd9/must-gather-44n2z" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="copy" containerID="cri-o://902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f" gracePeriod=2 Sep 29 20:12:24 crc kubenswrapper[4779]: I0929 20:12:24.483004 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jnvd9/must-gather-44n2z"] Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.012628 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jnvd9_must-gather-44n2z_4636abcd-6c3b-451c-be55-f51d93252d3f/copy/0.log" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.013220 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.116437 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mlz5\" (UniqueName: \"kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5\") pod \"4636abcd-6c3b-451c-be55-f51d93252d3f\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.116558 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output\") pod \"4636abcd-6c3b-451c-be55-f51d93252d3f\" (UID: \"4636abcd-6c3b-451c-be55-f51d93252d3f\") " Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.137725 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5" (OuterVolumeSpecName: "kube-api-access-4mlz5") pod "4636abcd-6c3b-451c-be55-f51d93252d3f" (UID: "4636abcd-6c3b-451c-be55-f51d93252d3f"). InnerVolumeSpecName "kube-api-access-4mlz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.218935 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mlz5\" (UniqueName: \"kubernetes.io/projected/4636abcd-6c3b-451c-be55-f51d93252d3f-kube-api-access-4mlz5\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.271148 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4636abcd-6c3b-451c-be55-f51d93252d3f" (UID: "4636abcd-6c3b-451c-be55-f51d93252d3f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.320656 4779 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4636abcd-6c3b-451c-be55-f51d93252d3f-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.496642 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jnvd9_must-gather-44n2z_4636abcd-6c3b-451c-be55-f51d93252d3f/copy/0.log" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.497037 4779 generic.go:334] "Generic (PLEG): container finished" podID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerID="902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f" exitCode=143 Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.497089 4779 scope.go:117] "RemoveContainer" containerID="902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.497141 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jnvd9/must-gather-44n2z" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.534608 4779 scope.go:117] "RemoveContainer" containerID="111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.617006 4779 scope.go:117] "RemoveContainer" containerID="902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f" Sep 29 20:12:25 crc kubenswrapper[4779]: E0929 20:12:25.617492 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f\": container with ID starting with 902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f not found: ID does not exist" containerID="902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.617537 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f"} err="failed to get container status \"902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f\": rpc error: code = NotFound desc = could not find container \"902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f\": container with ID starting with 902d1e8a653c6c0ddaf0546c3936c5f2fb7e9a6ddc0f5cc0d8a5c61fe747698f not found: ID does not exist" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.617564 4779 scope.go:117] "RemoveContainer" containerID="111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f" Sep 29 20:12:25 crc kubenswrapper[4779]: E0929 20:12:25.618360 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f\": container with ID starting with 111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f not found: ID does not exist" containerID="111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.618392 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f"} err="failed to get container status \"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f\": rpc error: code = NotFound desc = could not find container \"111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f\": container with ID starting with 111dcbc3a8e2986b20f582f98b90ad771a363aafb71dff8b6325d4ba5f504e0f not found: ID does not exist" Sep 29 20:12:25 crc kubenswrapper[4779]: I0929 20:12:25.781613 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" path="/var/lib/kubelet/pods/4636abcd-6c3b-451c-be55-f51d93252d3f/volumes" Sep 29 20:12:27 crc kubenswrapper[4779]: I0929 20:12:27.767217 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:12:27 crc kubenswrapper[4779]: E0929 20:12:27.767870 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:12:41 crc kubenswrapper[4779]: I0929 20:12:41.766625 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:12:41 crc kubenswrapper[4779]: E0929 20:12:41.767362 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:12:56 crc kubenswrapper[4779]: I0929 20:12:56.766884 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:12:56 crc kubenswrapper[4779]: E0929 20:12:56.767727 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:13:11 crc kubenswrapper[4779]: I0929 20:13:11.766780 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:13:11 crc kubenswrapper[4779]: E0929 20:13:11.767655 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.857390 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q2hkw/must-gather-f252t"] Sep 29 20:13:12 crc kubenswrapper[4779]: E0929 20:13:12.858119 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="copy" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858134 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="copy" Sep 29 20:13:12 crc kubenswrapper[4779]: E0929 20:13:12.858148 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="registry-server" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858157 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="registry-server" Sep 29 20:13:12 crc kubenswrapper[4779]: E0929 20:13:12.858197 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="extract-content" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858207 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="extract-content" Sep 29 20:13:12 crc kubenswrapper[4779]: E0929 20:13:12.858226 4779 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="extract-utilities" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858236 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="extract-utilities" Sep 29 20:13:12 crc kubenswrapper[4779]: E0929 20:13:12.858265 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="gather" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858273 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="gather" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858541 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="gather" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858571 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b05e6ae1-9c72-4dba-9443-05aa6cbf5992" containerName="registry-server" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.858590 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4636abcd-6c3b-451c-be55-f51d93252d3f" containerName="copy" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.859584 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.862292 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q2hkw"/"kube-root-ca.crt" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.862301 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-q2hkw"/"openshift-service-ca.crt" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.862874 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-q2hkw"/"default-dockercfg-5s44j" Sep 29 20:13:12 crc kubenswrapper[4779]: I0929 20:13:12.881918 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q2hkw/must-gather-f252t"] Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.000747 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.000811 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxjd7\" (UniqueName: \"kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.101725 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.101782 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-nxjd7\" (UniqueName: \"kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.102111 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.122188 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxjd7\" (UniqueName: \"kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7\") pod \"must-gather-f252t\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.179514 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.670805 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-q2hkw/must-gather-f252t"] Sep 29 20:13:13 crc kubenswrapper[4779]: I0929 20:13:13.993562 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/must-gather-f252t" event={"ID":"09948b09-d4c9-4ff2-a27d-396128e19259","Type":"ContainerStarted","Data":"70a720d1923c20f7a8622ef0f39e30baf3171cdf91eb94cad8a8d3250a6cef0e"} Sep 29 20:13:15 crc kubenswrapper[4779]: I0929 20:13:15.009371 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/must-gather-f252t" event={"ID":"09948b09-d4c9-4ff2-a27d-396128e19259","Type":"ContainerStarted","Data":"353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a"} Sep 29 20:13:15 crc kubenswrapper[4779]: I0929 20:13:15.009761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/must-gather-f252t" event={"ID":"09948b09-d4c9-4ff2-a27d-396128e19259","Type":"ContainerStarted","Data":"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810"} Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.623477 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q2hkw/must-gather-f252t" podStartSLOduration=5.6234579369999995 podStartE2EDuration="5.623457937s" podCreationTimestamp="2025-09-29 20:13:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:13:15.047029986 +0000 UTC m=+3905.931455096" watchObservedRunningTime="2025-09-29 20:13:17.623457937 +0000 UTC m=+3908.507883037" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.626657 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-wxmdc"] Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.628126 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.805569 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z85fc\" (UniqueName: \"kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.805638 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.907206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z85fc\" (UniqueName: \"kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.907266 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.907596 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.939901 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z85fc\" (UniqueName: \"kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc\") pod \"crc-debug-wxmdc\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:17 crc kubenswrapper[4779]: I0929 20:13:17.958473 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:13:18 crc kubenswrapper[4779]: I0929 20:13:18.016003 4779 scope.go:117] "RemoveContainer" containerID="2b1b5e83d4e4b3f186a56de03b4f18e1eeee2f49cc9fb0ca8a29c7efd2a66a7c" Sep 29 20:13:18 crc kubenswrapper[4779]: I0929 20:13:18.036408 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" event={"ID":"ee3b1085-55b4-4841-bfb5-af563f17b394","Type":"ContainerStarted","Data":"2985da5c7fcff9aca2549708e18f27b8134311fabf8142f01be03c4d5f8ccdd7"} Sep 29 20:13:19 crc kubenswrapper[4779]: I0929 20:13:19.046717 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" event={"ID":"ee3b1085-55b4-4841-bfb5-af563f17b394","Type":"ContainerStarted","Data":"b786ea67e3cb0aff4e1644987b428d6109c88bb5e2a692997b38b4c33afe493f"} Sep 29 20:13:19 crc kubenswrapper[4779]: I0929 20:13:19.065960 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" podStartSLOduration=2.065937875 podStartE2EDuration="2.065937875s" podCreationTimestamp="2025-09-29 20:13:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:13:19.059620153 +0000 UTC m=+3909.944045253" watchObservedRunningTime="2025-09-29 20:13:19.065937875 +0000 UTC m=+3909.950362975" Sep 29 20:13:25 crc kubenswrapper[4779]: I0929 20:13:25.766069 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:13:25 crc kubenswrapper[4779]: E0929 20:13:25.766786 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:13:40 crc kubenswrapper[4779]: I0929 20:13:40.767573 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:13:40 crc kubenswrapper[4779]: E0929 20:13:40.768385 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:13:52 crc kubenswrapper[4779]: I0929 20:13:52.766278 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:13:52 crc kubenswrapper[4779]: E0929 20:13:52.766945 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.341474 4779 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.343917 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.352313 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.369005 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4gmq\" (UniqueName: \"kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.369131 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.369180 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.471533 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.472029 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4gmq\" (UniqueName: \"kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.472107 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.473040 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.473043 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities\") pod \"redhat-marketplace-4glfd\" 
(UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.511520 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4gmq\" (UniqueName: \"kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq\") pod \"redhat-marketplace-4glfd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:13:59 crc kubenswrapper[4779]: I0929 20:13:59.678332 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:00 crc kubenswrapper[4779]: I0929 20:14:00.181110 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:14:00 crc kubenswrapper[4779]: I0929 20:14:00.414716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerStarted","Data":"e77a94a8c503625ef53273a951c2bbdee6c537be79a30af92257a20f90483470"} Sep 29 20:14:01 crc kubenswrapper[4779]: I0929 20:14:01.424903 4779 generic.go:334] "Generic (PLEG): container finished" podID="6632e577-c567-485b-ab43-768b217fc3dd" containerID="9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a" exitCode=0 Sep 29 20:14:01 crc kubenswrapper[4779]: I0929 20:14:01.424999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerDied","Data":"9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a"} Sep 29 20:14:02 crc kubenswrapper[4779]: I0929 20:14:02.433722 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerStarted","Data":"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3"} Sep 29 20:14:03 crc kubenswrapper[4779]: I0929 20:14:03.446552 4779 generic.go:334] "Generic (PLEG): container finished" podID="6632e577-c567-485b-ab43-768b217fc3dd" containerID="20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3" exitCode=0 Sep 29 20:14:03 crc kubenswrapper[4779]: I0929 20:14:03.447718 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerDied","Data":"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3"} Sep 29 20:14:03 crc kubenswrapper[4779]: I0929 20:14:03.766569 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:14:03 crc kubenswrapper[4779]: E0929 20:14:03.767560 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:14:05 crc kubenswrapper[4779]: I0929 20:14:05.463433 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" 
event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerStarted","Data":"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed"} Sep 29 20:14:05 crc kubenswrapper[4779]: I0929 20:14:05.492527 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4glfd" podStartSLOduration=3.347329323 podStartE2EDuration="6.492509102s" podCreationTimestamp="2025-09-29 20:13:59 +0000 UTC" firstStartedPulling="2025-09-29 20:14:01.426954362 +0000 UTC m=+3952.311379462" lastFinishedPulling="2025-09-29 20:14:04.572134141 +0000 UTC m=+3955.456559241" observedRunningTime="2025-09-29 20:14:05.485765738 +0000 UTC m=+3956.370190838" watchObservedRunningTime="2025-09-29 20:14:05.492509102 +0000 UTC m=+3956.376934202" Sep 29 20:14:09 crc kubenswrapper[4779]: I0929 20:14:09.679430 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:09 crc kubenswrapper[4779]: I0929 20:14:09.679899 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:09 crc kubenswrapper[4779]: I0929 20:14:09.736786 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:10 crc kubenswrapper[4779]: I0929 20:14:10.579660 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:10 crc kubenswrapper[4779]: I0929 20:14:10.626796 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:14:12 crc kubenswrapper[4779]: I0929 20:14:12.554868 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4glfd" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="registry-server" containerID="cri-o://dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed" gracePeriod=2 Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.094667 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.123230 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4gmq\" (UniqueName: \"kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq\") pod \"6632e577-c567-485b-ab43-768b217fc3dd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.123278 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content\") pod \"6632e577-c567-485b-ab43-768b217fc3dd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.123375 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities\") pod \"6632e577-c567-485b-ab43-768b217fc3dd\" (UID: \"6632e577-c567-485b-ab43-768b217fc3dd\") " Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.125014 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities" (OuterVolumeSpecName: "utilities") pod "6632e577-c567-485b-ab43-768b217fc3dd" (UID: "6632e577-c567-485b-ab43-768b217fc3dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.140662 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq" (OuterVolumeSpecName: "kube-api-access-h4gmq") pod "6632e577-c567-485b-ab43-768b217fc3dd" (UID: "6632e577-c567-485b-ab43-768b217fc3dd"). InnerVolumeSpecName "kube-api-access-h4gmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.151393 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6632e577-c567-485b-ab43-768b217fc3dd" (UID: "6632e577-c567-485b-ab43-768b217fc3dd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.227063 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4gmq\" (UniqueName: \"kubernetes.io/projected/6632e577-c567-485b-ab43-768b217fc3dd-kube-api-access-h4gmq\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.227121 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.227139 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6632e577-c567-485b-ab43-768b217fc3dd-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.564992 4779 generic.go:334] "Generic (PLEG): container finished" podID="6632e577-c567-485b-ab43-768b217fc3dd" containerID="dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed" exitCode=0 Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.565080 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerDied","Data":"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed"} Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.565108 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4glfd" event={"ID":"6632e577-c567-485b-ab43-768b217fc3dd","Type":"ContainerDied","Data":"e77a94a8c503625ef53273a951c2bbdee6c537be79a30af92257a20f90483470"} Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.565125 4779 scope.go:117] "RemoveContainer" containerID="dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.565264 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4glfd" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.595236 4779 scope.go:117] "RemoveContainer" containerID="20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.621398 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.622758 4779 scope.go:117] "RemoveContainer" containerID="9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.631111 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4glfd"] Sep 29 20:14:13 crc kubenswrapper[4779]: E0929 20:14:13.690713 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6632e577_c567_485b_ab43_768b217fc3dd.slice/crio-e77a94a8c503625ef53273a951c2bbdee6c537be79a30af92257a20f90483470\": RecentStats: unable to find data in memory cache]" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.692401 4779 scope.go:117] "RemoveContainer" containerID="dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed" Sep 29 20:14:13 crc kubenswrapper[4779]: E0929 20:14:13.692764 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed\": container with ID starting with dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed not found: ID does not exist" containerID="dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.692792 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed"} err="failed to get container status \"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed\": rpc error: code = NotFound desc = could not find container \"dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed\": container with ID starting with dae6222a7c7250f759df52a5200f06464113a70fa9cd1d9d0183c1f066298eed not found: ID does not exist" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.692811 4779 scope.go:117] "RemoveContainer" containerID="20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3" Sep 29 20:14:13 crc kubenswrapper[4779]: E0929 20:14:13.693016 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3\": container with ID starting with 20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3 not found: ID does not exist" containerID="20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.693050 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3"} err="failed to get container status \"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3\": rpc error: code = NotFound desc = could not find container \"20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3\": container with ID 
starting with 20a9503e8df952975ae20819add448c1ec1c0c59d081ffb4ccc8357097e3eea3 not found: ID does not exist" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.693069 4779 scope.go:117] "RemoveContainer" containerID="9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a" Sep 29 20:14:13 crc kubenswrapper[4779]: E0929 20:14:13.693579 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a\": container with ID starting with 9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a not found: ID does not exist" containerID="9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.693646 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a"} err="failed to get container status \"9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a\": rpc error: code = NotFound desc = could not find container \"9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a\": container with ID starting with 9c56e31f44c0fd66c027d863b9485f7c5091596719e512646bcb82e0ca02160a not found: ID does not exist" Sep 29 20:14:13 crc kubenswrapper[4779]: I0929 20:14:13.783998 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6632e577-c567-485b-ab43-768b217fc3dd" path="/var/lib/kubelet/pods/6632e577-c567-485b-ab43-768b217fc3dd/volumes" Sep 29 20:14:16 crc kubenswrapper[4779]: I0929 20:14:16.766427 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:14:16 crc kubenswrapper[4779]: E0929 20:14:16.766885 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.030548 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f48875c8b-24729_06187c54-071a-4a20-adc1-84627f949933/barbican-api-log/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.041223 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f48875c8b-24729_06187c54-071a-4a20-adc1-84627f949933/barbican-api/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.183285 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58f6cf64bb-t8fdp_d3365fba-7e29-4f75-aa74-67ffd7275a15/barbican-keystone-listener/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.301472 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58f6cf64bb-t8fdp_d3365fba-7e29-4f75-aa74-67ffd7275a15/barbican-keystone-listener-log/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.421990 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7bd9b84c75-dphls_7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b/barbican-worker/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.474451 4779 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_barbican-worker-7bd9b84c75-dphls_7f98eb0b-f8cf-42b7-a6f7-5a803c2be72b/barbican-worker-log/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.721477 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-d25cs_8beecba1-5edc-4f95-a9ad-49889c62c0ae/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.855242 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/ceilometer-central-agent/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.877552 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/proxy-httpd/0.log" Sep 29 20:14:17 crc kubenswrapper[4779]: I0929 20:14:17.889858 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/ceilometer-notification-agent/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.034732 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_743a0275-ddbc-4917-adf2-0d268c8fe08b/sg-core/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.144239 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e063f6a5-dcd3-413d-bb65-e9ceeca73df0/cinder-api/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.265352 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e063f6a5-dcd3-413d-bb65-e9ceeca73df0/cinder-api-log/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.389560 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a6f49188-efdd-4f27-ad02-4656f2cf5d11/cinder-scheduler/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.558588 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a6f49188-efdd-4f27-ad02-4656f2cf5d11/probe/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.591688 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-c66cd_02971c6b-be51-4634-b3a0-661125814bea/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.806849 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xxmr6_e2235a29-5c01-4d29-a4cb-97f0abe8ca63/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:18 crc kubenswrapper[4779]: I0929 20:14:18.964003 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/init/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.149814 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/init/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.169427 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-x2rzr_5e41bca4-c1c5-4a1f-b5a2-4f1d2af086a4/dnsmasq-dns/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.340270 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-sf7pw_b2f2a76e-5c5c-4708-bc75-12909e8859fc/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.424378 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_fed19b9e-ec0b-4944-a98c-885ec4862d48/glance-httpd/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.504528 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_fed19b9e-ec0b-4944-a98c-885ec4862d48/glance-log/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.599700 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_880c493a-a9b5-4cdc-a4b1-256feeee3e1b/glance-httpd/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.711613 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_880c493a-a9b5-4cdc-a4b1-256feeee3e1b/glance-log/0.log" Sep 29 20:14:19 crc kubenswrapper[4779]: I0929 20:14:19.930388 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fc6fd7df6-btpzz_6cd722c9-4e9b-4bad-a9fd-84529803680b/horizon/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.041781 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9xmjg_d00741d2-40ff-4d5c-b697-cc4ac7ed7511/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.253262 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-lc87v_0651501f-91fe-410c-9e0c-d1e49760bedd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.284636 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fc6fd7df6-btpzz_6cd722c9-4e9b-4bad-a9fd-84529803680b/horizon-log/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.477546 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319601-sr9fp_329c65e1-c7ba-4829-9ba7-6cbaf2e93d48/keystone-cron/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.551357 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6d4574fd6f-56jht_413037bd-ba8f-4874-a915-1c77426d689b/keystone-api/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.652674 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_43945a37-324b-4e37-a960-d92da8f5e56f/kube-state-metrics/0.log" Sep 29 20:14:20 crc kubenswrapper[4779]: I0929 20:14:20.739764 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-r5m6z_7ff61060-93f6-4bd6-a6f9-75195322a8d2/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:21 crc kubenswrapper[4779]: I0929 20:14:21.234968 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d6758dbc9-fppqt_3225f7bc-88d9-4d11-a415-e6a421573849/neutron-httpd/0.log" Sep 29 20:14:21 crc kubenswrapper[4779]: I0929 20:14:21.254871 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d6758dbc9-fppqt_3225f7bc-88d9-4d11-a415-e6a421573849/neutron-api/0.log" Sep 29 20:14:21 crc kubenswrapper[4779]: I0929 20:14:21.443258 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkq7j_b17f52c4-7329-4262-87d2-d5ef94e88f28/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:21 crc kubenswrapper[4779]: I0929 20:14:21.968791 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5d230bec-c78b-45a3-b334-7353e1a8b827/nova-api-log/0.log" Sep 29 20:14:22 crc kubenswrapper[4779]: I0929 20:14:22.191757 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7b53aa08-b1e7-4e69-86f3-830bb5c84002/nova-cell0-conductor-conductor/0.log" Sep 29 20:14:22 crc kubenswrapper[4779]: I0929 20:14:22.481662 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5d230bec-c78b-45a3-b334-7353e1a8b827/nova-api-api/0.log" Sep 29 20:14:22 crc kubenswrapper[4779]: I0929 20:14:22.517463 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_87caa150-0994-48a8-816a-b6dccebf4092/nova-cell1-conductor-conductor/0.log" Sep 29 20:14:22 crc kubenswrapper[4779]: I0929 20:14:22.778741 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-h4cr7_06ca3195-0d79-4376-9627-6075a8cdf09c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:22 crc kubenswrapper[4779]: I0929 20:14:22.781965 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_d6e31019-bd61-45e8-9380-b973dcbe4873/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 20:14:23 crc kubenswrapper[4779]: I0929 20:14:23.144830 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f5393b3-8bf7-4578-98c9-2323a8ec7ea6/nova-metadata-log/0.log" Sep 29 20:14:23 crc kubenswrapper[4779]: I0929 20:14:23.587998 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_26aa6a8b-e675-4b3e-aa17-9ed17b49c907/nova-scheduler-scheduler/0.log" Sep 29 20:14:23 crc kubenswrapper[4779]: I0929 20:14:23.605210 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/mysql-bootstrap/0.log" Sep 29 20:14:23 crc kubenswrapper[4779]: I0929 20:14:23.756073 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/mysql-bootstrap/0.log" Sep 29 20:14:23 crc kubenswrapper[4779]: I0929 20:14:23.849574 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31200251-0f84-4946-88fb-276aa79589d9/galera/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.035453 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/mysql-bootstrap/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.254337 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/galera/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.312309 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_f5cf224b-f179-4c3c-bc69-5f3d448aca1d/mysql-bootstrap/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.499754 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_58f628df-8d11-4663-b84b-0c810edaa5fb/openstackclient/0.log" Sep 29 20:14:24 crc 
kubenswrapper[4779]: I0929 20:14:24.686241 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f5393b3-8bf7-4578-98c9-2323a8ec7ea6/nova-metadata-metadata/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.701441 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ds8vm_e1ff3819-9cf2-492f-a447-94a898e0a54d/openstack-network-exporter/0.log" Sep 29 20:14:24 crc kubenswrapper[4779]: I0929 20:14:24.911482 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server-init/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.088422 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server-init/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.151911 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovsdb-server/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.179742 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-lx5dn_8cebec61-5d6f-4bd2-a9e7-61f0b0f74751/ovs-vswitchd/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.402197 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rwqzd_9771c712-00ce-4dcf-ab04-7b6893c8725c/ovn-controller/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.598438 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-626r9_0b9b5a28-3a66-4041-a143-8c8a40b27ef4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.662375 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_210875cc-31fa-480b-bd01-a042fc73dcd6/openstack-network-exporter/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.794121 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_210875cc-31fa-480b-bd01-a042fc73dcd6/ovn-northd/0.log" Sep 29 20:14:25 crc kubenswrapper[4779]: I0929 20:14:25.862226 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79136f36-d427-488e-81e2-ef55c73ee91a/openstack-network-exporter/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.009632 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79136f36-d427-488e-81e2-ef55c73ee91a/ovsdbserver-nb/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.122841 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bbcdb00b-849b-40c6-9e53-2c751dbd11dd/openstack-network-exporter/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.191980 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bbcdb00b-849b-40c6-9e53-2c751dbd11dd/ovsdbserver-sb/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.377908 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7846dbb58d-4ftsw_7b7c84ea-91fb-467d-b935-6f5034a2d7fb/placement-api/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.523979 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7846dbb58d-4ftsw_7b7c84ea-91fb-467d-b935-6f5034a2d7fb/placement-log/0.log" 
Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.635831 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/setup-container/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.816814 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/setup-container/0.log" Sep 29 20:14:26 crc kubenswrapper[4779]: I0929 20:14:26.830535 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e0f654c2-f8a3-4049-a18c-75f12edc65ca/rabbitmq/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.009444 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/setup-container/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.179690 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/setup-container/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.256355 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cde20801-b6a5-444f-ad26-2b36244bb38d/rabbitmq/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.396654 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-9k524_501fc953-417c-4abf-aafc-9cc25c3ecb23/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.551815 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-dht8n_630eccd4-a2c9-4003-b315-2d8d18ebeeba/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.761802 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-pkmk8_76990f22-5c56-482e-a5a6-6d3c74bba7cd/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.809664 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6j4j9_aceb0e20-3731-4018-947e-40f2193b8c0a/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:27 crc kubenswrapper[4779]: I0929 20:14:27.983538 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-nrsx2_a075ace8-eeae-4fa5-9353-72e217e82dfd/ssh-known-hosts-edpm-deployment/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.275594 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-76666bfbfc-dj7qj_1dd6be85-ce64-429a-9197-23450db2e2ad/proxy-server/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.335541 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-76666bfbfc-dj7qj_1dd6be85-ce64-429a-9197-23450db2e2ad/proxy-httpd/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.460381 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-wzdw7_587857be-cc5b-43cb-bf66-d9e7aadcc587/swift-ring-rebalance/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.543251 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-auditor/0.log" Sep 29 20:14:28 
crc kubenswrapper[4779]: I0929 20:14:28.679126 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-reaper/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.766026 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:14:28 crc kubenswrapper[4779]: E0929 20:14:28.766364 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.775001 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-replicator/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.816969 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/account-server/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.845145 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-auditor/0.log" Sep 29 20:14:28 crc kubenswrapper[4779]: I0929 20:14:28.969946 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-server/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.031754 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-replicator/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.076795 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/container-updater/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.235383 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-auditor/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.288040 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-expirer/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.308215 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-replicator/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.430709 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-server/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.481697 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/object-updater/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.525475 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/rsync/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.630685 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_a214376c-8f64-4f89-9354-14de32e2f17f/swift-recon-cron/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.756557 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vf74d_a8bc7976-d585-4a94-b925-870996cc4ae3/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:29 crc kubenswrapper[4779]: I0929 20:14:29.946583 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_8bf09edd-ad1f-4883-ade0-8082b2055f60/tempest-tests-tempest-tests-runner/0.log" Sep 29 20:14:30 crc kubenswrapper[4779]: I0929 20:14:30.148821 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_ab44e769-9e8e-4cec-8fa4-97f93b25dd8c/test-operator-logs-container/0.log" Sep 29 20:14:30 crc kubenswrapper[4779]: I0929 20:14:30.189360 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-9cnjt_2ef530d3-702c-44e3-a066-85a59398fafc/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 20:14:39 crc kubenswrapper[4779]: I0929 20:14:39.866847 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_8de5fc49-d446-41aa-aa5e-d32fd04a281e/memcached/0.log" Sep 29 20:14:40 crc kubenswrapper[4779]: I0929 20:14:40.767006 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:14:40 crc kubenswrapper[4779]: E0929 20:14:40.767433 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:14:55 crc kubenswrapper[4779]: I0929 20:14:55.777975 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:14:55 crc kubenswrapper[4779]: E0929 20:14:55.778986 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.163627 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs"] Sep 29 20:15:00 crc kubenswrapper[4779]: E0929 20:15:00.164838 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="extract-utilities" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.164861 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="extract-utilities" Sep 29 20:15:00 crc kubenswrapper[4779]: E0929 20:15:00.164899 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="extract-content" Sep 29 20:15:00 crc 
kubenswrapper[4779]: I0929 20:15:00.164910 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="extract-content" Sep 29 20:15:00 crc kubenswrapper[4779]: E0929 20:15:00.164934 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="registry-server" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.164943 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="registry-server" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.165218 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6632e577-c567-485b-ab43-768b217fc3dd" containerName="registry-server" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.166036 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.172437 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.172498 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.181062 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs"] Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.285275 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgptd\" (UniqueName: \"kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.285442 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.285553 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.386658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgptd\" (UniqueName: \"kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.386708 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.386840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.387843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.393203 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.403704 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgptd\" (UniqueName: \"kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd\") pod \"collect-profiles-29319615-z4fzs\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.499120 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:00 crc kubenswrapper[4779]: I0929 20:15:00.948131 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs"] Sep 29 20:15:01 crc kubenswrapper[4779]: I0929 20:15:01.008546 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" event={"ID":"55746335-4c13-494f-be13-f3450113766b","Type":"ContainerStarted","Data":"65a195962a176be6ee627e8c754382ab25da1890561c5dacfea1c3b46e4ccbf0"} Sep 29 20:15:02 crc kubenswrapper[4779]: I0929 20:15:02.038722 4779 generic.go:334] "Generic (PLEG): container finished" podID="55746335-4c13-494f-be13-f3450113766b" containerID="80dc2fb9546e3ef4e2dbb217e321f2161c420f50511347f099a5c51b3fbda7ee" exitCode=0 Sep 29 20:15:02 crc kubenswrapper[4779]: I0929 20:15:02.038885 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" event={"ID":"55746335-4c13-494f-be13-f3450113766b","Type":"ContainerDied","Data":"80dc2fb9546e3ef4e2dbb217e321f2161c420f50511347f099a5c51b3fbda7ee"} Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.053077 4779 generic.go:334] "Generic (PLEG): container finished" podID="ee3b1085-55b4-4841-bfb5-af563f17b394" containerID="b786ea67e3cb0aff4e1644987b428d6109c88bb5e2a692997b38b4c33afe493f" exitCode=0 Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.053173 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" event={"ID":"ee3b1085-55b4-4841-bfb5-af563f17b394","Type":"ContainerDied","Data":"b786ea67e3cb0aff4e1644987b428d6109c88bb5e2a692997b38b4c33afe493f"} Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.413587 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.458267 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume\") pod \"55746335-4c13-494f-be13-f3450113766b\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.458435 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgptd\" (UniqueName: \"kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd\") pod \"55746335-4c13-494f-be13-f3450113766b\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.458618 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume\") pod \"55746335-4c13-494f-be13-f3450113766b\" (UID: \"55746335-4c13-494f-be13-f3450113766b\") " Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.483749 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume" (OuterVolumeSpecName: "config-volume") pod "55746335-4c13-494f-be13-f3450113766b" (UID: "55746335-4c13-494f-be13-f3450113766b"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.483891 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "55746335-4c13-494f-be13-f3450113766b" (UID: "55746335-4c13-494f-be13-f3450113766b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.485759 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd" (OuterVolumeSpecName: "kube-api-access-jgptd") pod "55746335-4c13-494f-be13-f3450113766b" (UID: "55746335-4c13-494f-be13-f3450113766b"). InnerVolumeSpecName "kube-api-access-jgptd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.560435 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55746335-4c13-494f-be13-f3450113766b-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.560469 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55746335-4c13-494f-be13-f3450113766b-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:03 crc kubenswrapper[4779]: I0929 20:15:03.560480 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgptd\" (UniqueName: \"kubernetes.io/projected/55746335-4c13-494f-be13-f3450113766b-kube-api-access-jgptd\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.068651 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" event={"ID":"55746335-4c13-494f-be13-f3450113766b","Type":"ContainerDied","Data":"65a195962a176be6ee627e8c754382ab25da1890561c5dacfea1c3b46e4ccbf0"} Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.069016 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65a195962a176be6ee627e8c754382ab25da1890561c5dacfea1c3b46e4ccbf0" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.068701 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319615-z4fzs" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.157562 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.217108 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-wxmdc"] Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.226900 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-wxmdc"] Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.280095 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z85fc\" (UniqueName: \"kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc\") pod \"ee3b1085-55b4-4841-bfb5-af563f17b394\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.280272 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host\") pod \"ee3b1085-55b4-4841-bfb5-af563f17b394\" (UID: \"ee3b1085-55b4-4841-bfb5-af563f17b394\") " Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.280644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host" (OuterVolumeSpecName: "host") pod "ee3b1085-55b4-4841-bfb5-af563f17b394" (UID: "ee3b1085-55b4-4841-bfb5-af563f17b394"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.284339 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc" (OuterVolumeSpecName: "kube-api-access-z85fc") pod "ee3b1085-55b4-4841-bfb5-af563f17b394" (UID: "ee3b1085-55b4-4841-bfb5-af563f17b394"). InnerVolumeSpecName "kube-api-access-z85fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.383750 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z85fc\" (UniqueName: \"kubernetes.io/projected/ee3b1085-55b4-4841-bfb5-af563f17b394-kube-api-access-z85fc\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.383804 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3b1085-55b4-4841-bfb5-af563f17b394-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.500494 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd"] Sep 29 20:15:04 crc kubenswrapper[4779]: I0929 20:15:04.511786 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319570-j2rbd"] Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.083966 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2985da5c7fcff9aca2549708e18f27b8134311fabf8142f01be03c4d5f8ccdd7" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.084059 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-wxmdc" Sep 29 20:15:05 crc kubenswrapper[4779]: E0929 20:15:05.160591 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee3b1085_55b4_4841_bfb5_af563f17b394.slice/crio-2985da5c7fcff9aca2549708e18f27b8134311fabf8142f01be03c4d5f8ccdd7\": RecentStats: unable to find data in memory cache]" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.437660 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-ml5m9"] Sep 29 20:15:05 crc kubenswrapper[4779]: E0929 20:15:05.438471 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3b1085-55b4-4841-bfb5-af563f17b394" containerName="container-00" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.438489 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3b1085-55b4-4841-bfb5-af563f17b394" containerName="container-00" Sep 29 20:15:05 crc kubenswrapper[4779]: E0929 20:15:05.438507 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55746335-4c13-494f-be13-f3450113766b" containerName="collect-profiles" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.438515 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="55746335-4c13-494f-be13-f3450113766b" containerName="collect-profiles" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.438759 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3b1085-55b4-4841-bfb5-af563f17b394" containerName="container-00" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.438779 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="55746335-4c13-494f-be13-f3450113766b" containerName="collect-profiles" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.439543 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.507582 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.507710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjb6j\" (UniqueName: \"kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.609199 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.609449 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.609466 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjb6j\" (UniqueName: \"kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.637815 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjb6j\" (UniqueName: \"kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j\") pod \"crc-debug-ml5m9\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.766911 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.786169 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab" path="/var/lib/kubelet/pods/b668ad5e-5ccd-48ef-9a1d-a42bd4b7f3ab/volumes" Sep 29 20:15:05 crc kubenswrapper[4779]: I0929 20:15:05.787501 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee3b1085-55b4-4841-bfb5-af563f17b394" path="/var/lib/kubelet/pods/ee3b1085-55b4-4841-bfb5-af563f17b394/volumes" Sep 29 20:15:05 crc kubenswrapper[4779]: W0929 20:15:05.804534 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3c71d8e_be1a_4ece_ae48_3cdd4b405d16.slice/crio-db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84 WatchSource:0}: Error finding container db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84: Status 404 returned error can't find the container with id db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84 Sep 29 20:15:06 crc kubenswrapper[4779]: I0929 20:15:06.095444 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" event={"ID":"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16","Type":"ContainerStarted","Data":"8fd1b2dd08a8016d27db3e04201ddd01e64db4bfab220c580d52940db0983804"} Sep 29 20:15:06 crc kubenswrapper[4779]: I0929 20:15:06.096801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" event={"ID":"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16","Type":"ContainerStarted","Data":"db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84"} Sep 29 20:15:06 crc kubenswrapper[4779]: I0929 20:15:06.116118 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" podStartSLOduration=1.116081379 podStartE2EDuration="1.116081379s" podCreationTimestamp="2025-09-29 20:15:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 20:15:06.113802547 +0000 UTC m=+4016.998227707" watchObservedRunningTime="2025-09-29 20:15:06.116081379 +0000 UTC m=+4017.000506519" Sep 29 20:15:07 crc kubenswrapper[4779]: I0929 20:15:07.105876 4779 generic.go:334] "Generic (PLEG): container finished" podID="d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" containerID="8fd1b2dd08a8016d27db3e04201ddd01e64db4bfab220c580d52940db0983804" exitCode=0 Sep 29 20:15:07 crc kubenswrapper[4779]: I0929 20:15:07.105944 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" event={"ID":"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16","Type":"ContainerDied","Data":"8fd1b2dd08a8016d27db3e04201ddd01e64db4bfab220c580d52940db0983804"} Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.213215 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.353924 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjb6j\" (UniqueName: \"kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j\") pod \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.354257 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host\") pod \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\" (UID: \"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16\") " Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.354665 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host" (OuterVolumeSpecName: "host") pod "d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" (UID: "d3c71d8e-be1a-4ece-ae48-3cdd4b405d16"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.358796 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j" (OuterVolumeSpecName: "kube-api-access-hjb6j") pod "d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" (UID: "d3c71d8e-be1a-4ece-ae48-3cdd4b405d16"). InnerVolumeSpecName "kube-api-access-hjb6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.455522 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:08 crc kubenswrapper[4779]: I0929 20:15:08.455551 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjb6j\" (UniqueName: \"kubernetes.io/projected/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16-kube-api-access-hjb6j\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:09 crc kubenswrapper[4779]: I0929 20:15:09.128167 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" event={"ID":"d3c71d8e-be1a-4ece-ae48-3cdd4b405d16","Type":"ContainerDied","Data":"db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84"} Sep 29 20:15:09 crc kubenswrapper[4779]: I0929 20:15:09.128513 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db016be6a0b78cd86f413dd1a6edfe3e50e1d9b8788862f8a1d93d875fd21f84" Sep 29 20:15:09 crc kubenswrapper[4779]: I0929 20:15:09.128203 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-ml5m9" Sep 29 20:15:09 crc kubenswrapper[4779]: I0929 20:15:09.776195 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:15:09 crc kubenswrapper[4779]: E0929 20:15:09.776483 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:15:12 crc kubenswrapper[4779]: I0929 20:15:12.855004 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-ml5m9"] Sep 29 20:15:12 crc kubenswrapper[4779]: I0929 20:15:12.865866 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-ml5m9"] Sep 29 20:15:13 crc kubenswrapper[4779]: I0929 20:15:13.782718 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" path="/var/lib/kubelet/pods/d3c71d8e-be1a-4ece-ae48-3cdd4b405d16/volumes" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.071935 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-p6mnw"] Sep 29 20:15:14 crc kubenswrapper[4779]: E0929 20:15:14.072411 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" containerName="container-00" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.072431 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" containerName="container-00" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.072717 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c71d8e-be1a-4ece-ae48-3cdd4b405d16" containerName="container-00" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.073539 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.148749 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.149168 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lvrf\" (UniqueName: \"kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.250624 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lvrf\" (UniqueName: \"kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.250912 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.251012 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.269564 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lvrf\" (UniqueName: \"kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf\") pod \"crc-debug-p6mnw\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:14 crc kubenswrapper[4779]: I0929 20:15:14.390299 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:15 crc kubenswrapper[4779]: I0929 20:15:15.183190 4779 generic.go:334] "Generic (PLEG): container finished" podID="5b2e5468-b1c5-4aed-8a04-21921042c860" containerID="0f91d9a475870016f93ec02a26f6e7e0bfb216ff9f8a7a8c73219f0fd9e4229c" exitCode=0 Sep 29 20:15:15 crc kubenswrapper[4779]: I0929 20:15:15.183306 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" event={"ID":"5b2e5468-b1c5-4aed-8a04-21921042c860","Type":"ContainerDied","Data":"0f91d9a475870016f93ec02a26f6e7e0bfb216ff9f8a7a8c73219f0fd9e4229c"} Sep 29 20:15:15 crc kubenswrapper[4779]: I0929 20:15:15.183868 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" event={"ID":"5b2e5468-b1c5-4aed-8a04-21921042c860","Type":"ContainerStarted","Data":"b458e9803de403bb80bf01efb484f983ace671c535f92bc5a5926a122d88cad5"} Sep 29 20:15:15 crc kubenswrapper[4779]: I0929 20:15:15.266900 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-p6mnw"] Sep 29 20:15:15 crc kubenswrapper[4779]: I0929 20:15:15.275398 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q2hkw/crc-debug-p6mnw"] Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.306517 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.392997 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host\") pod \"5b2e5468-b1c5-4aed-8a04-21921042c860\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.393089 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host" (OuterVolumeSpecName: "host") pod "5b2e5468-b1c5-4aed-8a04-21921042c860" (UID: "5b2e5468-b1c5-4aed-8a04-21921042c860"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.393158 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lvrf\" (UniqueName: \"kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf\") pod \"5b2e5468-b1c5-4aed-8a04-21921042c860\" (UID: \"5b2e5468-b1c5-4aed-8a04-21921042c860\") " Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.394520 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e5468-b1c5-4aed-8a04-21921042c860-host\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.400517 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf" (OuterVolumeSpecName: "kube-api-access-2lvrf") pod "5b2e5468-b1c5-4aed-8a04-21921042c860" (UID: "5b2e5468-b1c5-4aed-8a04-21921042c860"). InnerVolumeSpecName "kube-api-access-2lvrf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.496470 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lvrf\" (UniqueName: \"kubernetes.io/projected/5b2e5468-b1c5-4aed-8a04-21921042c860-kube-api-access-2lvrf\") on node \"crc\" DevicePath \"\"" Sep 29 20:15:16 crc kubenswrapper[4779]: I0929 20:15:16.980757 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.137876 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.170558 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.190768 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.201109 4779 scope.go:117] "RemoveContainer" containerID="0f91d9a475870016f93ec02a26f6e7e0bfb216ff9f8a7a8c73219f0fd9e4229c" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.201162 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/crc-debug-p6mnw" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.520082 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/util/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.524697 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/pull/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.558778 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b0ce77c3c22bafa9d9845daaf088adfa5ab8db9a8342ffc3a6e0496b69h87gh_e83c2f7a-1165-4d7b-ba2f-ec47f6291cfd/extract/0.log" Sep 29 20:15:17 crc kubenswrapper[4779]: I0929 20:15:17.778204 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b2e5468-b1c5-4aed-8a04-21921042c860" path="/var/lib/kubelet/pods/5b2e5468-b1c5-4aed-8a04-21921042c860/volumes" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.248280 4779 scope.go:117] "RemoveContainer" containerID="c9c67766d333b412557235c05f208d0ebd2a98c525f7d01f62bfbccae78dba4c" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.267076 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-l2cw6_7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997/kube-rbac-proxy/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.269840 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-txbkn_5d20194a-c49a-4da1-a081-23d5c3bde845/kube-rbac-proxy/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.296474 4779 
scope.go:117] "RemoveContainer" containerID="18befd3a77b3f05971a2e5c5eb1a3a99661e525fefd3a97f46e0422281ca1307" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.322807 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-l2cw6_7a72bb32-1401-4fb3-a8b5-e2c9d3c7e997/manager/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.507364 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-txbkn_5d20194a-c49a-4da1-a081-23d5c3bde845/manager/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.540036 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kmgvf_0e2748e0-ee66-45a1-b018-0798ad0ef293/manager/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.545150 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kmgvf_0e2748e0-ee66-45a1-b018-0798ad0ef293/kube-rbac-proxy/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.742568 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9xr2s_68c5f3eb-52f4-4ede-ac89-f3a9aafe421b/kube-rbac-proxy/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.864860 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-9xr2s_68c5f3eb-52f4-4ede-ac89-f3a9aafe421b/manager/0.log" Sep 29 20:15:18 crc kubenswrapper[4779]: I0929 20:15:18.910219 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fql2p_df41de35-4c6d-4313-8ccb-19dcead38269/kube-rbac-proxy/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.006490 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-fql2p_df41de35-4c6d-4313-8ccb-19dcead38269/manager/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.060435 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-8ndmd_0b008477-9497-4cb1-9b44-c8c0dacbd0ae/kube-rbac-proxy/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.105996 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-8ndmd_0b008477-9497-4cb1-9b44-c8c0dacbd0ae/manager/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.271146 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7pf7d_50a9326b-f577-4994-ba3a-28f1ffb1df6c/kube-rbac-proxy/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.391299 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7pf7d_50a9326b-f577-4994-ba3a-28f1ffb1df6c/manager/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.497016 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-46trm_36748ad5-2673-4d95-ada2-7ff95f740fa9/manager/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.497621 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-46trm_36748ad5-2673-4d95-ada2-7ff95f740fa9/kube-rbac-proxy/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.940204 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-6x9z9_15635458-2ece-4c4b-a011-1c82d097bfdf/kube-rbac-proxy/0.log" Sep 29 20:15:19 crc kubenswrapper[4779]: I0929 20:15:19.998692 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-svxsl_65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.025520 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-svxsl_65ba17f1-ab1d-4e5b-b204-2ecc74c7daa1/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.162724 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-6x9z9_15635458-2ece-4c4b-a011-1c82d097bfdf/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.196891 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-k26dh_833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.232748 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-k26dh_833ffe0d-b2b2-4fd5-8094-ad9fe58f60c0/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.383613 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-kvpnd_9ae9e131-70db-4bd1-8347-c5714c2b4754/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.452954 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-kvpnd_9ae9e131-70db-4bd1-8347-c5714c2b4754/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.466084 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-94ldb_9449cbcb-f74f-473e-9c0d-f1737b39c383/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.663482 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-94ldb_9449cbcb-f74f-473e-9c0d-f1737b39c383/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.669999 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-mtqwc_1c3a147f-0c72-4889-80aa-8b53a0c9ea3f/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.705309 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-mtqwc_1c3a147f-0c72-4889-80aa-8b53a0c9ea3f/manager/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.834845 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-54f6z_49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.884715 4779 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f846cdb6-8tk8p_3c04c1cb-ecb2-42a0-82e6-3c2842508041/kube-rbac-proxy/0.log" Sep 29 20:15:20 crc kubenswrapper[4779]: I0929 20:15:20.889139 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-54f6z_49adbd52-e22f-4cc8-9ccb-6cb8c64f12f4/manager/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.045559 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764cfd59bc-2w6ls_8988ff92-ee96-4702-875b-a311c8d08a7b/kube-rbac-proxy/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.293338 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764cfd59bc-2w6ls_8988ff92-ee96-4702-875b-a311c8d08a7b/operator/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.359694 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pwzpf_96bf8594-ea42-4a66-baa7-39679ec2aa5d/registry-server/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.477171 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-f7xwn_63554382-d024-4d43-b5c5-b31b80d47749/kube-rbac-proxy/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.651959 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-f7xwn_63554382-d024-4d43-b5c5-b31b80d47749/manager/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.686761 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-8fjvn_dfc872d3-d6c0-42af-9ab7-7695257d969f/kube-rbac-proxy/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.762596 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-8fjvn_dfc872d3-d6c0-42af-9ab7-7695257d969f/manager/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.831544 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-wl7d2_5c9afdac-c252-4cd4-afb1-9d7fb43d86e1/operator/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.931016 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f846cdb6-8tk8p_3c04c1cb-ecb2-42a0-82e6-3c2842508041/manager/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.968465 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-9jgwn_38ef5cba-94db-4e0d-b2ad-290293848c65/kube-rbac-proxy/0.log" Sep 29 20:15:21 crc kubenswrapper[4779]: I0929 20:15:21.988705 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-9jgwn_38ef5cba-94db-4e0d-b2ad-290293848c65/manager/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.122068 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-tflz6_cdc62734-f794-43fc-9af8-752098cdf316/kube-rbac-proxy/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.181408 4779 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-dlgq2_13740318-83f1-4384-9b4c-b8de793773d3/kube-rbac-proxy/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.219189 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-tflz6_cdc62734-f794-43fc-9af8-752098cdf316/manager/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.312929 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-dlgq2_13740318-83f1-4384-9b4c-b8de793773d3/manager/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.391572 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-crptq_a15b1202-c010-40ae-be51-75fbb766fba0/kube-rbac-proxy/0.log" Sep 29 20:15:22 crc kubenswrapper[4779]: I0929 20:15:22.421574 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-crptq_a15b1202-c010-40ae-be51-75fbb766fba0/manager/0.log" Sep 29 20:15:23 crc kubenswrapper[4779]: I0929 20:15:23.766411 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:15:23 crc kubenswrapper[4779]: E0929 20:15:23.767191 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:15:37 crc kubenswrapper[4779]: I0929 20:15:37.770172 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:15:37 crc kubenswrapper[4779]: E0929 20:15:37.771188 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:15:39 crc kubenswrapper[4779]: I0929 20:15:39.381540 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fcw8k_e9fff169-c8a1-4062-9a2a-ab4c1e790c07/control-plane-machine-set-operator/0.log" Sep 29 20:15:39 crc kubenswrapper[4779]: I0929 20:15:39.382051 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-66qtp_0ab1562e-a39e-4ddf-95ee-cf6ff520883d/kube-rbac-proxy/0.log" Sep 29 20:15:39 crc kubenswrapper[4779]: I0929 20:15:39.535139 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-66qtp_0ab1562e-a39e-4ddf-95ee-cf6ff520883d/machine-api-operator/0.log" Sep 29 20:15:49 crc kubenswrapper[4779]: I0929 20:15:49.771565 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:15:49 crc kubenswrapper[4779]: E0929 
20:15:49.772371 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:15:54 crc kubenswrapper[4779]: I0929 20:15:54.132924 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-g5jq2_599cf28e-c7ed-4c1a-a84d-ae90ec0708ba/cert-manager-controller/0.log" Sep 29 20:15:54 crc kubenswrapper[4779]: I0929 20:15:54.264549 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zpt2c_d0e20784-de41-4e9c-8c95-f047e75f30fd/cert-manager-cainjector/0.log" Sep 29 20:15:54 crc kubenswrapper[4779]: I0929 20:15:54.356897 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-bf8nn_3566f49d-2c94-40d8-b5b1-aa51cc7c043b/cert-manager-webhook/0.log" Sep 29 20:16:01 crc kubenswrapper[4779]: I0929 20:16:01.767017 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:16:01 crc kubenswrapper[4779]: E0929 20:16:01.768023 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.564462 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-nvgwc_6c719c4d-6f34-4427-8c72-69a5c0efe754/nmstate-console-plugin/0.log" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.738485 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-v9vn6_c74d7452-c58a-4336-9acf-acc9190816a9/nmstate-handler/0.log" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.803747 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-zwk4x_c0c8d102-b0f5-4ebd-ad41-3359fd330e5c/kube-rbac-proxy/0.log" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.809042 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-zwk4x_c0c8d102-b0f5-4ebd-ad41-3359fd330e5c/nmstate-metrics/0.log" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.957671 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-98lp2_5df72cf6-fbb4-4160-afad-5fb056e747ed/nmstate-operator/0.log" Sep 29 20:16:07 crc kubenswrapper[4779]: I0929 20:16:07.995164 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-qhlnm_ef026a49-4282-45ef-b535-288ac25fe011/nmstate-webhook/0.log" Sep 29 20:16:16 crc kubenswrapper[4779]: I0929 20:16:16.766782 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:16:16 crc kubenswrapper[4779]: E0929 20:16:16.767462 4779 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.084648 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l5gr5_127fd6d6-e32b-4152-9f62-23b6b051318d/kube-rbac-proxy/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.257342 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l5gr5_127fd6d6-e32b-4152-9f62-23b6b051318d/controller/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.286054 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.513677 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.548764 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.562240 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.582584 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.722255 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.738976 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.769499 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.797805 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.938396 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-frr-files/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.967884 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-reloader/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.983222 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/cp-metrics/0.log" Sep 29 20:16:23 crc kubenswrapper[4779]: I0929 20:16:23.992821 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/controller/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.131256 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/frr-metrics/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.167932 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/kube-rbac-proxy-frr/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.213394 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/kube-rbac-proxy/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.384019 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/reloader/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.442644 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-6f4p6_09b83c31-96ba-457f-9385-7a124ddbc54d/frr-k8s-webhook-server/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.622466 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6ccfd99bc8-cd86r_bc87a3b2-72fa-4bca-9172-47b799399c7b/manager/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.720900 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-8764cbcb6-t7df9_3d88a574-ee3d-4b67-80c7-cb9ab603edfd/webhook-server/0.log" Sep 29 20:16:24 crc kubenswrapper[4779]: I0929 20:16:24.902945 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2v7nb_b16e724b-3b5e-46e6-bb8a-1aebd631d549/kube-rbac-proxy/0.log" Sep 29 20:16:25 crc kubenswrapper[4779]: I0929 20:16:25.297187 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2v7nb_b16e724b-3b5e-46e6-bb8a-1aebd631d549/speaker/0.log" Sep 29 20:16:25 crc kubenswrapper[4779]: I0929 20:16:25.470589 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lq9x_be71c6d8-ba0b-46da-a558-f6c91cfddd59/frr/0.log" Sep 29 20:16:30 crc kubenswrapper[4779]: I0929 20:16:30.766596 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:16:30 crc kubenswrapper[4779]: E0929 20:16:30.767597 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:16:39 crc kubenswrapper[4779]: I0929 20:16:39.984329 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log" Sep 29 20:16:40 crc kubenswrapper[4779]: I0929 20:16:40.142173 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log" Sep 29 20:16:40 crc 
kubenswrapper[4779]: I0929 20:16:40.158935 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log" Sep 29 20:16:40 crc kubenswrapper[4779]: I0929 20:16:40.162021 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log" Sep 29 20:16:40 crc kubenswrapper[4779]: I0929 20:16:40.855453 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/util/0.log" Sep 29 20:16:40 crc kubenswrapper[4779]: I0929 20:16:40.857579 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/pull/0.log" Sep 29 20:16:40 crc kubenswrapper[4779]: I0929 20:16:40.891179 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwrmrj_96d504b6-b993-413d-aa0a-6406515e6008/extract/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.063370 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.215079 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.273948 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.276862 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.493383 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-content/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.512050 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/extract-utilities/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.766046 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:16:41 crc kubenswrapper[4779]: E0929 20:16:41.766277 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.868924 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log" Sep 29 20:16:41 crc kubenswrapper[4779]: I0929 20:16:41.892189 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hvfth_9bd42e07-23d9-49a8-b02d-bd11be36fc0c/registry-server/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.161005 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.194713 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.199811 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.396878 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-utilities/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.440783 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/extract-content/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.655550 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.826596 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.838461 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.925930 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log" Sep 29 20:16:42 crc kubenswrapper[4779]: I0929 20:16:42.980565 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-rtkr2_97741d17-ab9c-415a-b310-38e8c914d91a/registry-server/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.048803 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/pull/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.067343 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/util/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.084943 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d968mxxp_a1246df7-aa2f-4e5a-9a55-99e92227fcbf/extract/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.219176 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vcrlk_378c23da-08aa-4f09-9171-29a4f81908bb/marketplace-operator/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.319905 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.444312 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.481025 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.588960 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.766777 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-utilities/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.838882 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.898782 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/registry-server/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.899543 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6w68s_664a3e86-9290-495f-9b9b-7b8fe5dc7177/extract-content/0.log" Sep 29 20:16:43 crc kubenswrapper[4779]: I0929 20:16:43.966082 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log" Sep 29 20:16:44 crc kubenswrapper[4779]: I0929 20:16:44.050353 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log" Sep 29 20:16:44 crc kubenswrapper[4779]: I0929 20:16:44.085636 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log" Sep 29 20:16:44 crc kubenswrapper[4779]: I0929 20:16:44.257063 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-utilities/0.log" Sep 29 20:16:44 crc kubenswrapper[4779]: I0929 20:16:44.267304 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/extract-content/0.log" Sep 29 20:16:44 crc kubenswrapper[4779]: I0929 20:16:44.616263 4779 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-9v764_23a92b93-fce1-410f-b088-394235c8d3b8/registry-server/0.log" Sep 29 20:16:45 crc kubenswrapper[4779]: I0929 20:16:45.977342 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:16:45 crc kubenswrapper[4779]: E0929 20:16:45.977981 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b2e5468-b1c5-4aed-8a04-21921042c860" containerName="container-00" Sep 29 20:16:45 crc kubenswrapper[4779]: I0929 20:16:45.977993 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b2e5468-b1c5-4aed-8a04-21921042c860" containerName="container-00" Sep 29 20:16:45 crc kubenswrapper[4779]: I0929 20:16:45.978157 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b2e5468-b1c5-4aed-8a04-21921042c860" containerName="container-00" Sep 29 20:16:45 crc kubenswrapper[4779]: I0929 20:16:45.983932 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.003017 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.073420 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.073540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9rqs\" (UniqueName: \"kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.073587 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.175195 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.175260 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9rqs\" (UniqueName: \"kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.175297 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.175749 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.175797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.200136 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9rqs\" (UniqueName: \"kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs\") pod \"certified-operators-hr5gd\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.312033 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:46 crc kubenswrapper[4779]: I0929 20:16:46.667507 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:16:47 crc kubenswrapper[4779]: I0929 20:16:47.096380 4779 generic.go:334] "Generic (PLEG): container finished" podID="5d70e5e2-9de3-4d5b-9c76-1870eb488e09" containerID="cfb8a800c3084c2686047f9dbe691a5bc08dc2a81c71879fad0904562da38650" exitCode=0 Sep 29 20:16:47 crc kubenswrapper[4779]: I0929 20:16:47.096448 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerDied","Data":"cfb8a800c3084c2686047f9dbe691a5bc08dc2a81c71879fad0904562da38650"} Sep 29 20:16:47 crc kubenswrapper[4779]: I0929 20:16:47.098284 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerStarted","Data":"8eb8aee4e8e05fdfd625b917d44c352e6ca155638c54cd5b416eb0ec140dd57d"} Sep 29 20:16:47 crc kubenswrapper[4779]: I0929 20:16:47.098901 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 20:16:49 crc kubenswrapper[4779]: I0929 20:16:49.120896 4779 generic.go:334] "Generic (PLEG): container finished" podID="5d70e5e2-9de3-4d5b-9c76-1870eb488e09" containerID="fe6a2ca1ff09098c8c79ab65f4c18fb39d3b8d1b19d13a88df36d07dca879b55" exitCode=0 Sep 29 20:16:49 crc kubenswrapper[4779]: I0929 20:16:49.121018 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerDied","Data":"fe6a2ca1ff09098c8c79ab65f4c18fb39d3b8d1b19d13a88df36d07dca879b55"} Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.132761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerStarted","Data":"604d3ef4c9df68e4f9d97bcef423e218a51ca034b9c030e6b191130dd77e58da"} Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.155405 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hr5gd" podStartSLOduration=2.541925874 podStartE2EDuration="5.155381633s" podCreationTimestamp="2025-09-29 20:16:45 +0000 UTC" firstStartedPulling="2025-09-29 20:16:47.098507691 +0000 UTC m=+4117.982932801" lastFinishedPulling="2025-09-29 20:16:49.71196346 +0000 UTC m=+4120.596388560" observedRunningTime="2025-09-29 20:16:50.148753773 +0000 UTC m=+4121.033178883" watchObservedRunningTime="2025-09-29 20:16:50.155381633 +0000 UTC m=+4121.039806753" Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.780252 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.783221 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.811853 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.973223 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.973308 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7wj5\" (UniqueName: \"kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:50 crc kubenswrapper[4779]: I0929 20:16:50.973358 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.075044 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7wj5\" (UniqueName: \"kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.075136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.075515 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.075659 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.075997 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.098402 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7wj5\" (UniqueName: \"kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5\") pod \"redhat-operators-7ctf7\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.113076 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:16:51 crc kubenswrapper[4779]: I0929 20:16:51.670702 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:16:52 crc kubenswrapper[4779]: I0929 20:16:52.172749 4779 generic.go:334] "Generic (PLEG): container finished" podID="17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" containerID="7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7" exitCode=0 Sep 29 20:16:52 crc kubenswrapper[4779]: I0929 20:16:52.172919 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerDied","Data":"7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7"} Sep 29 20:16:52 crc kubenswrapper[4779]: I0929 20:16:52.173161 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerStarted","Data":"16246ddb963f38277de61e875c12829cd8893d38bbdcbe2fef1b8a21da4f375c"} Sep 29 20:16:54 crc kubenswrapper[4779]: I0929 20:16:54.198439 4779 generic.go:334] "Generic (PLEG): container finished" podID="17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" containerID="5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9" exitCode=0 Sep 29 20:16:54 crc kubenswrapper[4779]: I0929 20:16:54.198631 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerDied","Data":"5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9"} Sep 29 20:16:54 crc kubenswrapper[4779]: I0929 20:16:54.766268 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:16:54 crc kubenswrapper[4779]: E0929 20:16:54.766520 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:16:55 crc kubenswrapper[4779]: I0929 20:16:55.213351 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerStarted","Data":"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f"} Sep 29 20:16:56 crc kubenswrapper[4779]: I0929 20:16:56.312972 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:56 crc kubenswrapper[4779]: I0929 20:16:56.313377 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:56 crc kubenswrapper[4779]: I0929 20:16:56.363144 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:56 crc kubenswrapper[4779]: I0929 20:16:56.383431 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7ctf7" podStartSLOduration=3.9415182 podStartE2EDuration="6.383408473s" podCreationTimestamp="2025-09-29 20:16:50 +0000 UTC" firstStartedPulling="2025-09-29 20:16:52.174421155 +0000 UTC m=+4123.058846255" lastFinishedPulling="2025-09-29 20:16:54.616311408 +0000 UTC m=+4125.500736528" observedRunningTime="2025-09-29 20:16:55.241172316 +0000 UTC m=+4126.125597426" watchObservedRunningTime="2025-09-29 20:16:56.383408473 +0000 UTC m=+4127.267833613" Sep 29 20:16:57 crc kubenswrapper[4779]: I0929 20:16:57.278067 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:16:58 crc kubenswrapper[4779]: I0929 20:16:58.167798 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:16:59 crc kubenswrapper[4779]: I0929 20:16:59.249888 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hr5gd" podUID="5d70e5e2-9de3-4d5b-9c76-1870eb488e09" containerName="registry-server" containerID="cri-o://604d3ef4c9df68e4f9d97bcef423e218a51ca034b9c030e6b191130dd77e58da" gracePeriod=2 Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.113850 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.114461 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.176031 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.276640 4779 generic.go:334] "Generic (PLEG): container finished" podID="5d70e5e2-9de3-4d5b-9c76-1870eb488e09" containerID="604d3ef4c9df68e4f9d97bcef423e218a51ca034b9c030e6b191130dd77e58da" exitCode=0 Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.276707 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerDied","Data":"604d3ef4c9df68e4f9d97bcef423e218a51ca034b9c030e6b191130dd77e58da"} Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.323700 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.674928 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.763584 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.802400 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities\") pod \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.802495 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content\") pod \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.802642 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9rqs\" (UniqueName: \"kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs\") pod \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\" (UID: \"5d70e5e2-9de3-4d5b-9c76-1870eb488e09\") " Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.803613 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities" (OuterVolumeSpecName: "utilities") pod "5d70e5e2-9de3-4d5b-9c76-1870eb488e09" (UID: "5d70e5e2-9de3-4d5b-9c76-1870eb488e09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.842274 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d70e5e2-9de3-4d5b-9c76-1870eb488e09" (UID: "5d70e5e2-9de3-4d5b-9c76-1870eb488e09"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.904793 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:01 crc kubenswrapper[4779]: I0929 20:17:01.904830 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.056588 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs" (OuterVolumeSpecName: "kube-api-access-c9rqs") pod "5d70e5e2-9de3-4d5b-9c76-1870eb488e09" (UID: "5d70e5e2-9de3-4d5b-9c76-1870eb488e09"). InnerVolumeSpecName "kube-api-access-c9rqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.108765 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9rqs\" (UniqueName: \"kubernetes.io/projected/5d70e5e2-9de3-4d5b-9c76-1870eb488e09-kube-api-access-c9rqs\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.301982 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr5gd" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.302819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr5gd" event={"ID":"5d70e5e2-9de3-4d5b-9c76-1870eb488e09","Type":"ContainerDied","Data":"8eb8aee4e8e05fdfd625b917d44c352e6ca155638c54cd5b416eb0ec140dd57d"} Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.302885 4779 scope.go:117] "RemoveContainer" containerID="604d3ef4c9df68e4f9d97bcef423e218a51ca034b9c030e6b191130dd77e58da" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.335671 4779 scope.go:117] "RemoveContainer" containerID="fe6a2ca1ff09098c8c79ab65f4c18fb39d3b8d1b19d13a88df36d07dca879b55" Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.335911 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.350108 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hr5gd"] Sep 29 20:17:02 crc kubenswrapper[4779]: I0929 20:17:02.364199 4779 scope.go:117] "RemoveContainer" containerID="cfb8a800c3084c2686047f9dbe691a5bc08dc2a81c71879fad0904562da38650" Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.312062 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7ctf7" podUID="17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" containerName="registry-server" containerID="cri-o://242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f" gracePeriod=2 Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.787999 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d70e5e2-9de3-4d5b-9c76-1870eb488e09" path="/var/lib/kubelet/pods/5d70e5e2-9de3-4d5b-9c76-1870eb488e09/volumes" Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.836647 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.945243 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7wj5\" (UniqueName: \"kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5\") pod \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.945408 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content\") pod \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.945535 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities\") pod \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\" (UID: \"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c\") " Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.946441 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities" (OuterVolumeSpecName: "utilities") pod "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" (UID: "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:17:03 crc kubenswrapper[4779]: I0929 20:17:03.950413 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5" (OuterVolumeSpecName: "kube-api-access-f7wj5") pod "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" (UID: "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c"). InnerVolumeSpecName "kube-api-access-f7wj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.029876 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" (UID: "17f7b521-b055-4cd3-bbbe-d2a63e77ff7c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.047843 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.047884 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.047894 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7wj5\" (UniqueName: \"kubernetes.io/projected/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c-kube-api-access-f7wj5\") on node \"crc\" DevicePath \"\"" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.337457 4779 generic.go:334] "Generic (PLEG): container finished" podID="17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" containerID="242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f" exitCode=0 Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.337500 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerDied","Data":"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f"} Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.337528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ctf7" event={"ID":"17f7b521-b055-4cd3-bbbe-d2a63e77ff7c","Type":"ContainerDied","Data":"16246ddb963f38277de61e875c12829cd8893d38bbdcbe2fef1b8a21da4f375c"} Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.337545 4779 scope.go:117] "RemoveContainer" containerID="242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.337542 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7ctf7" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.380827 4779 scope.go:117] "RemoveContainer" containerID="5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.383714 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.394188 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7ctf7"] Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.408505 4779 scope.go:117] "RemoveContainer" containerID="7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.453365 4779 scope.go:117] "RemoveContainer" containerID="242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f" Sep 29 20:17:04 crc kubenswrapper[4779]: E0929 20:17:04.457428 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f\": container with ID starting with 242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f not found: ID does not exist" containerID="242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.457467 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f"} err="failed to get container status \"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f\": rpc error: code = NotFound desc = could not find container \"242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f\": container with ID starting with 242e4e1fd302cfe87b94f59cfffa7d91ff6a76cd65ae8e6a13446ad2275aa13f not found: ID does not exist" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.457492 4779 scope.go:117] "RemoveContainer" containerID="5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9" Sep 29 20:17:04 crc kubenswrapper[4779]: E0929 20:17:04.459001 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9\": container with ID starting with 5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9 not found: ID does not exist" containerID="5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.459033 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9"} err="failed to get container status \"5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9\": rpc error: code = NotFound desc = could not find container \"5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9\": container with ID starting with 5cf30105c6b2aa97cd56570ac2b82947b5bec3458c93dd425922aa4e73b8b4d9 not found: ID does not exist" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.459055 4779 scope.go:117] "RemoveContainer" containerID="7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7" Sep 29 20:17:04 crc kubenswrapper[4779]: E0929 20:17:04.459573 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7\": container with ID starting with 7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7 not found: ID does not exist" containerID="7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7" Sep 29 20:17:04 crc kubenswrapper[4779]: I0929 20:17:04.459600 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7"} err="failed to get container status \"7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7\": rpc error: code = NotFound desc = could not find container \"7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7\": container with ID starting with 7a5f2a04847ebdc619142829ac4a2270a6be2e84e328c1b4036e1d490e6dc9f7 not found: ID does not exist" Sep 29 20:17:05 crc kubenswrapper[4779]: I0929 20:17:05.816468 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f7b521-b055-4cd3-bbbe-d2a63e77ff7c" path="/var/lib/kubelet/pods/17f7b521-b055-4cd3-bbbe-d2a63e77ff7c/volumes" Sep 29 20:17:06 crc kubenswrapper[4779]: I0929 20:17:06.766444 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:17:06 crc kubenswrapper[4779]: E0929 20:17:06.767065 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d5cnr_openshift-machine-config-operator(476bc421-1113-455e-bcc8-e207e47dad19)\"" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" Sep 29 20:17:18 crc kubenswrapper[4779]: I0929 20:17:18.766221 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" Sep 29 20:17:19 crc kubenswrapper[4779]: I0929 20:17:19.467360 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"412cc896daeae2d4e6e741fa0e8faefad46f2d91a3f69b921e809725fd557df2"} Sep 29 20:18:43 crc kubenswrapper[4779]: I0929 20:18:43.448364 4779 generic.go:334] "Generic (PLEG): container finished" podID="09948b09-d4c9-4ff2-a27d-396128e19259" containerID="345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810" exitCode=0 Sep 29 20:18:43 crc kubenswrapper[4779]: I0929 20:18:43.448485 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-q2hkw/must-gather-f252t" event={"ID":"09948b09-d4c9-4ff2-a27d-396128e19259","Type":"ContainerDied","Data":"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810"} Sep 29 20:18:43 crc kubenswrapper[4779]: I0929 20:18:43.449983 4779 scope.go:117] "RemoveContainer" containerID="345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810" Sep 29 20:18:44 crc kubenswrapper[4779]: I0929 20:18:44.051308 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q2hkw_must-gather-f252t_09948b09-d4c9-4ff2-a27d-396128e19259/gather/0.log" Sep 29 20:18:55 crc kubenswrapper[4779]: I0929 20:18:55.582425 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-q2hkw/must-gather-f252t"] Sep 29 20:18:55 crc 
kubenswrapper[4779]: I0929 20:18:55.583608 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-q2hkw/must-gather-f252t" podUID="09948b09-d4c9-4ff2-a27d-396128e19259" containerName="copy" containerID="cri-o://353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a" gracePeriod=2 Sep 29 20:18:55 crc kubenswrapper[4779]: I0929 20:18:55.591070 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-q2hkw/must-gather-f252t"] Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.008614 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q2hkw_must-gather-f252t_09948b09-d4c9-4ff2-a27d-396128e19259/copy/0.log" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.009161 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.081163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output\") pod \"09948b09-d4c9-4ff2-a27d-396128e19259\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.081242 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxjd7\" (UniqueName: \"kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7\") pod \"09948b09-d4c9-4ff2-a27d-396128e19259\" (UID: \"09948b09-d4c9-4ff2-a27d-396128e19259\") " Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.089273 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7" (OuterVolumeSpecName: "kube-api-access-nxjd7") pod "09948b09-d4c9-4ff2-a27d-396128e19259" (UID: "09948b09-d4c9-4ff2-a27d-396128e19259"). InnerVolumeSpecName "kube-api-access-nxjd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.183412 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxjd7\" (UniqueName: \"kubernetes.io/projected/09948b09-d4c9-4ff2-a27d-396128e19259-kube-api-access-nxjd7\") on node \"crc\" DevicePath \"\"" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.256065 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "09948b09-d4c9-4ff2-a27d-396128e19259" (UID: "09948b09-d4c9-4ff2-a27d-396128e19259"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.284859 4779 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/09948b09-d4c9-4ff2-a27d-396128e19259-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.607752 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-q2hkw_must-gather-f252t_09948b09-d4c9-4ff2-a27d-396128e19259/copy/0.log" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.608225 4779 generic.go:334] "Generic (PLEG): container finished" podID="09948b09-d4c9-4ff2-a27d-396128e19259" containerID="353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a" exitCode=143 Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.608302 4779 scope.go:117] "RemoveContainer" containerID="353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.608572 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-q2hkw/must-gather-f252t" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.634481 4779 scope.go:117] "RemoveContainer" containerID="345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.728753 4779 scope.go:117] "RemoveContainer" containerID="353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a" Sep 29 20:18:56 crc kubenswrapper[4779]: E0929 20:18:56.729365 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a\": container with ID starting with 353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a not found: ID does not exist" containerID="353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.729416 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a"} err="failed to get container status \"353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a\": rpc error: code = NotFound desc = could not find container \"353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a\": container with ID starting with 353ad72802efc09d3b791a966223a9f63627efaf7a218ecca8e43585e461de9a not found: ID does not exist" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.729462 4779 scope.go:117] "RemoveContainer" containerID="345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810" Sep 29 20:18:56 crc kubenswrapper[4779]: E0929 20:18:56.730202 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810\": container with ID starting with 345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810 not found: ID does not exist" containerID="345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810" Sep 29 20:18:56 crc kubenswrapper[4779]: I0929 20:18:56.730233 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810"} err="failed to get container status 
\"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810\": rpc error: code = NotFound desc = could not find container \"345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810\": container with ID starting with 345aecbfb226ddf49edd41e9132fd246b7bf58ebba4601b387fdb87d26154810 not found: ID does not exist" Sep 29 20:18:57 crc kubenswrapper[4779]: I0929 20:18:57.780271 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09948b09-d4c9-4ff2-a27d-396128e19259" path="/var/lib/kubelet/pods/09948b09-d4c9-4ff2-a27d-396128e19259/volumes" Sep 29 20:19:18 crc kubenswrapper[4779]: I0929 20:19:18.525039 4779 scope.go:117] "RemoveContainer" containerID="b786ea67e3cb0aff4e1644987b428d6109c88bb5e2a692997b38b4c33afe493f" Sep 29 20:19:43 crc kubenswrapper[4779]: I0929 20:19:43.785590 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:19:43 crc kubenswrapper[4779]: I0929 20:19:43.786379 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:20:13 crc kubenswrapper[4779]: I0929 20:20:13.784792 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:20:13 crc kubenswrapper[4779]: I0929 20:20:13.785538 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:20:43 crc kubenswrapper[4779]: I0929 20:20:43.785090 4779 patch_prober.go:28] interesting pod/machine-config-daemon-d5cnr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 20:20:43 crc kubenswrapper[4779]: I0929 20:20:43.786017 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 20:20:43 crc kubenswrapper[4779]: I0929 20:20:43.787811 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" Sep 29 20:20:43 crc kubenswrapper[4779]: I0929 20:20:43.789144 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"412cc896daeae2d4e6e741fa0e8faefad46f2d91a3f69b921e809725fd557df2"} pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 20:20:43 crc kubenswrapper[4779]: I0929 20:20:43.789292 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" podUID="476bc421-1113-455e-bcc8-e207e47dad19" containerName="machine-config-daemon" containerID="cri-o://412cc896daeae2d4e6e741fa0e8faefad46f2d91a3f69b921e809725fd557df2" gracePeriod=600 Sep 29 20:20:44 crc kubenswrapper[4779]: I0929 20:20:44.885742 4779 generic.go:334] "Generic (PLEG): container finished" podID="476bc421-1113-455e-bcc8-e207e47dad19" containerID="412cc896daeae2d4e6e741fa0e8faefad46f2d91a3f69b921e809725fd557df2" exitCode=0 Sep 29 20:20:44 crc kubenswrapper[4779]: I0929 20:20:44.885829 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerDied","Data":"412cc896daeae2d4e6e741fa0e8faefad46f2d91a3f69b921e809725fd557df2"} Sep 29 20:20:44 crc kubenswrapper[4779]: I0929 20:20:44.886305 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d5cnr" event={"ID":"476bc421-1113-455e-bcc8-e207e47dad19","Type":"ContainerStarted","Data":"874ed68764b9da376a78dc06b73bc2fac7c976f8472970ab3d433845d78804ad"} Sep 29 20:20:44 crc kubenswrapper[4779]: I0929 20:20:44.886349 4779 scope.go:117] "RemoveContainer" containerID="ce7c9aaefc0e94314f1e9cd9f04a105e1ba3317878327817d7948af06e95f619" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066565070024457 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066565071017375 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066554117016517 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066554117015467 5ustar corecore